sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1478 @@
|
|
|
1
|
+
"""Common driver attributes and utilities."""
|
|
2
|
+
|
|
3
|
+
import graphlib
|
|
4
|
+
import hashlib
|
|
5
|
+
import logging
|
|
6
|
+
import re
|
|
7
|
+
from contextlib import suppress
|
|
8
|
+
from time import perf_counter
|
|
9
|
+
from typing import TYPE_CHECKING, Any, ClassVar, Final, Literal, NamedTuple, NoReturn, Protocol, cast, overload
|
|
10
|
+
|
|
11
|
+
from mypy_extensions import mypyc_attr
|
|
12
|
+
from sqlglot import exp
|
|
13
|
+
|
|
14
|
+
from sqlspec.builder import QueryBuilder
|
|
15
|
+
from sqlspec.core import (
|
|
16
|
+
SQL,
|
|
17
|
+
CachedStatement,
|
|
18
|
+
ParameterStyle,
|
|
19
|
+
SQLResult,
|
|
20
|
+
Statement,
|
|
21
|
+
StatementConfig,
|
|
22
|
+
TypedParameter,
|
|
23
|
+
get_cache,
|
|
24
|
+
get_cache_config,
|
|
25
|
+
split_sql_script,
|
|
26
|
+
)
|
|
27
|
+
from sqlspec.core.metrics import StackExecutionMetrics
|
|
28
|
+
from sqlspec.core.parameters import fingerprint_parameters
|
|
29
|
+
from sqlspec.data_dictionary._loader import get_data_dictionary_loader
|
|
30
|
+
from sqlspec.data_dictionary._registry import get_dialect_config
|
|
31
|
+
from sqlspec.driver._storage_helpers import CAPABILITY_HINTS
|
|
32
|
+
from sqlspec.exceptions import ImproperConfigurationError, NotFoundError, SQLFileNotFoundError, StorageCapabilityError
|
|
33
|
+
from sqlspec.observability import ObservabilityRuntime, get_trace_context, resolve_db_system
|
|
34
|
+
from sqlspec.protocols import HasDataProtocol, HasExecuteProtocol, StatementProtocol
|
|
35
|
+
from sqlspec.typing import VersionCacheResult, VersionInfo
|
|
36
|
+
from sqlspec.utils.logging import get_logger, log_with_context
|
|
37
|
+
from sqlspec.utils.schema import to_schema as _to_schema_impl
|
|
38
|
+
from sqlspec.utils.type_guards import (
|
|
39
|
+
has_array_interface,
|
|
40
|
+
has_cursor_metadata,
|
|
41
|
+
has_dtype_str,
|
|
42
|
+
has_statement_type,
|
|
43
|
+
has_typecode,
|
|
44
|
+
has_typecode_and_len,
|
|
45
|
+
is_statement_filter,
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
if TYPE_CHECKING:
|
|
49
|
+
from collections.abc import Callable, Sequence
|
|
50
|
+
|
|
51
|
+
from sqlspec.core import FilterTypeT, StatementFilter
|
|
52
|
+
from sqlspec.core.parameters._types import ConvertedParameters
|
|
53
|
+
from sqlspec.core.stack import StatementStack
|
|
54
|
+
from sqlspec.data_dictionary._types import DialectConfig
|
|
55
|
+
from sqlspec.storage import AsyncStoragePipeline, StorageCapabilities, SyncStoragePipeline
|
|
56
|
+
from sqlspec.typing import ForeignKeyMetadata, SchemaT, StatementParameters
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
# Public API of this module: execution-result constants first, then the
# mixin/protocol classes and helper functions (each group alphabetical).
__all__ = (
    "DEFAULT_EXECUTION_RESULT",
    "EXEC_CURSOR_RESULT",
    "EXEC_ROWCOUNT_OVERRIDE",
    "EXEC_SPECIAL_DATA",
    "VERSION_GROUPS_MIN_FOR_MINOR",
    "VERSION_GROUPS_MIN_FOR_PATCH",
    "AsyncExceptionHandler",
    "CommonDriverAttributesMixin",
    "DataDictionaryDialectMixin",
    "DataDictionaryMixin",
    "ExecutionResult",
    "ScriptExecutionResult",
    "StackExecutionObserver",
    "SyncExceptionHandler",
    "describe_stack_statement",
    "handle_single_row_error",
    "hash_stack_operations",
    "make_cache_key_hashable",
    "resolve_db_system",
)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def _parameter_sort_key(item: "tuple[str, object]") -> float:
    """Return a numeric ordering key for a ``(name, value)`` parameter pair.

    Names that are plain digits (``"3"``) or ``"param_<digits>"`` sort by
    their numeric value; every other name sorts after them (infinity).
    """
    name = item[0]
    candidate = name[len("param_"):] if name.startswith("param_") else name
    return float(int(candidate)) if candidate.isdigit() else float("inf")
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _select_dominant_style(
|
|
94
|
+
style_counts: "dict[ParameterStyle, int]", precedence: "dict[ParameterStyle, int]"
|
|
95
|
+
) -> "ParameterStyle":
|
|
96
|
+
best_style: ParameterStyle | None = None
|
|
97
|
+
best_count = -1
|
|
98
|
+
best_precedence = 100
|
|
99
|
+
for style, count in style_counts.items():
|
|
100
|
+
current_precedence = precedence.get(style, 99)
|
|
101
|
+
if count > best_count or (count == best_count and current_precedence < best_precedence):
|
|
102
|
+
best_style = style
|
|
103
|
+
best_count = count
|
|
104
|
+
best_precedence = current_precedence
|
|
105
|
+
return cast("ParameterStyle", best_style)
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class SyncExceptionHandler(Protocol):
    """Protocol for synchronous exception handlers with deferred exception pattern.

    Exception handlers implement this protocol to avoid ABI boundary violations
    with mypyc-compiled code. Instead of raising exceptions from __exit__,
    handlers store mapped exceptions in pending_exception for the caller to raise.
    """

    # Mapped exception stored by __exit__; None when no exception occurred.
    # Callers inspect this after the `with` block and raise it themselves.
    pending_exception: Exception | None

    def __enter__(self) -> "SyncExceptionHandler": ...

    # Returns True to suppress the original exception (the mapped one is
    # surfaced via pending_exception instead).
    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
class AsyncExceptionHandler(Protocol):
    """Protocol for asynchronous exception handlers with deferred exception pattern.

    Exception handlers implement this protocol to avoid ABI boundary violations
    with mypyc-compiled code. Instead of raising exceptions from __aexit__,
    handlers store mapped exceptions in pending_exception for the caller to raise.
    """

    # Mapped exception stored by __aexit__; None when no exception occurred.
    # Callers inspect this after the `async with` block and raise it themselves.
    pending_exception: Exception | None

    async def __aenter__(self) -> "AsyncExceptionHandler": ...

    # Returns True to suppress the original exception (the mapped one is
    # surfaced via pending_exception instead).
    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool: ...
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
# Module-level logger shared by all driver helpers below.
logger = get_logger("sqlspec.driver")

# Minimum number of regex match groups required before the minor / patch
# components of a version string are read (see parse_version_string).
VERSION_GROUPS_MIN_FOR_MINOR = 1
VERSION_GROUPS_MIN_FOR_PATCH = 2


# Unique sentinel markers pushed onto the work stack in make_cache_key_hashable;
# when popped, they signal that an already-filled child list should now be
# collapsed into a tuple / frozenset.
_CONVERT_TO_TUPLE = object()
_CONVERT_TO_FROZENSET = object()
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def make_cache_key_hashable(obj: Any) -> Any:
    """Recursively convert unhashable types to hashable ones for cache keys.

    Uses an iterative stack-based approach to avoid C-stack recursion limits
    in mypyc-compiled code.

    For array-like objects (NumPy arrays, Python arrays, etc.), we use structural
    info (dtype + shape or typecode + length) rather than content for cache keys.

    Collections are processed with stack entries that track (object, parent_list, index)
    so we can convert substructures in-place and then replace placeholders with tuples or frozensets
    only after their children are evaluated. Dictionaries are iterated in sorted order for determinism
    while sets fall back to a best-effort ordering if necessary.

    Args:
        obj: Object to make hashable.

    Returns:
        A hashable representation of the object. Collections become tuples,
        arrays become structural tuples like ("ndarray", dtype, shape).
    """
    # Fast path: common scalars are already hashable.
    if isinstance(obj, (int, str, bytes, bool, float, type(None))):
        return obj

    # root is a one-slot container so the top-level object can be replaced
    # in-place with the same (parent, index) mechanism as nested values.
    root: list[Any] = [obj]
    stack = [(obj, root, 0)]

    while stack:
        current_obj, parent, idx = stack.pop()

        # Sentinel: children of parent[idx] are done; collapse list -> tuple.
        if current_obj is _CONVERT_TO_TUPLE:
            parent[idx] = tuple(parent[idx])
            continue

        # Sentinel: children of parent[idx] are done; collapse list -> frozenset.
        if current_obj is _CONVERT_TO_FROZENSET:
            parent[idx] = frozenset(parent[idx])
            continue

        # array.array-like: structural key from typecode (+ length when available).
        if has_typecode_and_len(current_obj):
            parent[idx] = ("array", current_obj.typecode, len(current_obj))
            continue
        if has_typecode(current_obj):
            parent[idx] = ("array", current_obj.typecode)
            continue
        # NumPy-style arrays: structural key from dtype + shape, with
        # progressively weaker fallbacks when those attributes misbehave.
        if has_array_interface(current_obj):
            try:
                dtype_str = current_obj.dtype.str if has_dtype_str(current_obj.dtype) else str(type(current_obj))
                shape = tuple(int(s) for s in current_obj.shape)
                parent[idx] = ("ndarray", dtype_str, shape)
            except (AttributeError, TypeError):
                try:
                    length = len(current_obj)
                    parent[idx] = ("array_like", type(current_obj).__name__, length)
                except (AttributeError, TypeError):
                    parent[idx] = ("array_like", type(current_obj).__name__)
            continue

        if isinstance(current_obj, (list, tuple)):
            new_list = [None] * len(current_obj)
            parent[idx] = new_list

            # Conversion sentinel is pushed first so it pops LAST,
            # after every child below has been processed.
            stack.append((_CONVERT_TO_TUPLE, parent, idx))

            # Children pushed in reverse so they pop in original order.
            stack.extend((current_obj[i], new_list, i) for i in range(len(current_obj) - 1, -1, -1))
            continue

        if isinstance(current_obj, dict):
            # Sorted items for a deterministic key; fall back to insertion
            # order when keys are mutually unorderable.
            try:
                sorted_items = sorted(current_obj.items())
            except TypeError:
                sorted_items = list(current_obj.items())

            items_list = []
            for k, v in sorted_items:
                items_list.append([k, v])

            parent[idx] = items_list

            stack.append((_CONVERT_TO_TUPLE, parent, idx))

            # For each [k, v] pair (reverse order): process the value (slot 1),
            # then collapse the pair itself into a tuple. Keys are already
            # hashable by dict invariant and are left untouched.
            for i in range(len(items_list) - 1, -1, -1):
                stack.extend(((_CONVERT_TO_TUPLE, items_list, i), (items_list[i][1], items_list[i], 1)))

            continue

        if isinstance(current_obj, set):
            # Best-effort deterministic ordering; frozenset makes the final
            # value order-insensitive anyway.
            try:
                sorted_list = sorted(current_obj)
            except TypeError:
                sorted_list = list(current_obj)

            new_list = [None] * len(sorted_list)
            parent[idx] = new_list

            stack.append((_CONVERT_TO_FROZENSET, parent, idx))

            stack.extend((sorted_list[i], new_list, i) for i in range(len(sorted_list) - 1, -1, -1))
            continue

        # Anything else is assumed hashable and kept verbatim.
        parent[idx] = current_obj

    return root[0]
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _callable_cache_key(func: Any) -> Any:
|
|
253
|
+
"""Return a stable cache key component for callables.
|
|
254
|
+
|
|
255
|
+
Args:
|
|
256
|
+
func: Callable or None.
|
|
257
|
+
|
|
258
|
+
Returns:
|
|
259
|
+
Tuple identifying the callable, or None for missing callables.
|
|
260
|
+
"""
|
|
261
|
+
if func is None:
|
|
262
|
+
return None
|
|
263
|
+
module = getattr(func, "__module__", None)
|
|
264
|
+
qualname = getattr(func, "__qualname__", type(func).__name__)
|
|
265
|
+
return (module, qualname, id(func))
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def hash_stack_operations(stack: "StatementStack") -> "tuple[str, ...]":
    """Return SHA256 fingerprints for statements contained in the stack.

    Each fingerprint is the first 16 hex digits of the SHA256 of the
    statement's diagnostic description.
    """
    fingerprints: list[str] = []
    for operation in stack.operations:
        description = describe_stack_statement(operation.statement)
        # Defensive coercion: describe_stack_statement should return str,
        # but hashing requires it.
        text = description if isinstance(description, str) else str(description)
        fingerprints.append(hashlib.sha256(text.encode("utf-8")).hexdigest()[:16])
    return tuple(fingerprints)
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
class StackExecutionObserver:
    """Context manager that aggregates telemetry for stack execution.

    On entry it opens a tracing span and emits a start log record; on exit it
    records duration/error metrics, closes the span, emits the metrics, and
    logs completion or failure. Exceptions are never suppressed.
    """

    __slots__ = (
        "continue_on_error",
        "driver",
        "hashed_operations",
        "metrics",
        "native_pipeline",
        "runtime",
        "span",
        "stack",
        "started",
    )

    def __init__(
        self,
        driver: "CommonDriverAttributesMixin",
        stack: "StatementStack",
        continue_on_error: bool,
        native_pipeline: bool,
    ) -> None:
        """Capture the driver, stack, and execution flags for telemetry.

        Args:
            driver: Driver executing the stack; supplies the observability runtime.
            stack: The statement stack being executed.
            continue_on_error: Whether execution proceeds past per-operation errors.
            native_pipeline: Whether the adapter's native pipeline path is used.
        """
        self.driver = driver
        self.stack = stack
        self.continue_on_error = continue_on_error
        self.native_pipeline = native_pipeline
        self.runtime = driver.observability
        self.metrics = StackExecutionMetrics(
            adapter=type(driver).__name__,
            statement_count=len(stack.operations),
            continue_on_error=continue_on_error,
            native_pipeline=native_pipeline,
            forced_disable=driver.stack_native_disabled,
        )
        # Per-statement SHA256 fingerprints, computed once for both log records.
        self.hashed_operations = hash_stack_operations(stack)
        self.span: Any | None = None
        self.started = 0.0

    def __enter__(self) -> "StackExecutionObserver":
        """Start timing, open the tracing span, and log execution start."""
        self.started = perf_counter()
        trace_id, span_id = get_trace_context()
        attributes = {
            "sqlspec.stack.statement_count": len(self.stack.operations),
            "sqlspec.stack.continue_on_error": self.continue_on_error,
            "sqlspec.stack.native_pipeline": self.native_pipeline,
            "sqlspec.stack.forced_disable": self.driver.stack_native_disabled,
        }
        self.span = self.runtime.start_span("sqlspec.stack.execute", attributes=attributes)
        log_with_context(
            logger,
            logging.DEBUG,
            "stack.execute.start",
            driver=type(self.driver).__name__,
            db_system=resolve_db_system(type(self.driver).__name__),
            stack_size=len(self.stack.operations),
            continue_on_error=self.continue_on_error,
            native_pipeline=self.native_pipeline,
            forced_disable=self.driver.stack_native_disabled,
            hashed_operations=self.hashed_operations,
            trace_id=trace_id,
            span_id=span_id,
        )
        return self

    def __exit__(self, exc_type: Any, exc: Exception | None, exc_tb: Any) -> Literal[False]:
        """Finalize metrics, span, and logging; never suppresses the exception."""
        duration = perf_counter() - self.started
        self.metrics.record_duration(duration)
        if exc is not None:
            self.metrics.record_error(exc)
        # Span is ended before metrics emission so the span carries the error.
        self.runtime.span_manager.end_span(self.span, error=exc if exc is not None else None)
        self.metrics.emit(self.runtime)
        level = logging.ERROR if exc is not None else logging.DEBUG
        trace_id, span_id = get_trace_context()
        log_with_context(
            logger,
            level,
            "stack.execute.failed" if exc is not None else "stack.execute.complete",
            driver=type(self.driver).__name__,
            db_system=resolve_db_system(type(self.driver).__name__),
            stack_size=len(self.stack.operations),
            continue_on_error=self.continue_on_error,
            native_pipeline=self.native_pipeline,
            forced_disable=self.driver.stack_native_disabled,
            hashed_operations=self.hashed_operations,
            duration_ms=duration * 1000,
            error_type=type(exc).__name__ if exc is not None else None,
            trace_id=trace_id,
            span_id=span_id,
        )
        # Returning False re-raises any in-flight exception.
        return False

    def record_operation_error(self, error: Exception) -> None:
        """Record an operation error when continue-on-error is enabled."""
        self.metrics.record_operation_error(error)
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def describe_stack_statement(statement: "StatementProtocol | str") -> str:
    """Render a stack statement as human-readable SQL text for diagnostics.

    Plain strings pass through unchanged; statement objects yield their raw
    SQL (falling back to processed SQL); anything else is repr()'d.
    """
    if isinstance(statement, str):
        return statement
    if not isinstance(statement, StatementProtocol):  # pyright: ignore[reportUnnecessaryIsInstance]
        return repr(statement)
    return statement.raw_sql or statement.sql
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
def handle_single_row_error(error: ValueError) -> "NoReturn":
    """Normalize single-row selection errors to SQLSpec exceptions.

    A ValueError whose message starts with "No result found" is translated
    into NotFoundError (chained to the original); any other ValueError is
    re-raised unchanged.
    """
    if str(error).startswith("No result found"):
        msg = "No rows found"
        raise NotFoundError(msg) from error
    raise error
|
|
392
|
+
|
|
393
|
+
|
|
394
|
+
@mypyc_attr(native_class=False, allow_interpreted_subclasses=True)
class DataDictionaryDialectMixin:
    """Mixin providing dialect SQL helpers for data dictionaries.

    Subclasses set ``dialect``; every helper resolves configuration and
    queries for that dialect.
    """

    __slots__ = ()

    # Dialect identifier supplied by the concrete data dictionary subclass.
    dialect: str

    def get_dialect_config(self) -> "DialectConfig":
        """Return the dialect configuration for this data dictionary."""
        # Delegates to the module-level get_dialect_config helper.
        return get_dialect_config(self.dialect)

    def get_query(self, name: str) -> "SQL":
        """Return a named SQL query object for this dialect."""
        return get_data_dictionary_loader().get_query(self.dialect, name)

    def get_query_text(self, name: str) -> str:
        """Return raw SQL text for a named query for this dialect."""
        return get_data_dictionary_loader().get_query_text(self.dialect, name)

    def get_query_text_or_none(self, name: str) -> "str | None":
        """Return raw SQL text for a named query, or None when it is missing."""
        try:
            return self.get_query_text(name)
        except SQLFileNotFoundError:
            return None

    def resolve_schema(self, schema: "str | None") -> "str | None":
        """Return the given schema name, falling back to the dialect default."""
        if schema is None:
            return self.get_dialect_config().default_schema
        return schema

    def resolve_feature_flag(self, feature: str, version: "VersionInfo | None") -> bool:
        """Resolve a feature flag using dialect config and version info.

        An explicit flag wins; otherwise the feature is enabled when the
        detected version meets the configured minimum. Unknown features or a
        missing version resolve to False.
        """
        config = self.get_dialect_config()
        explicit = config.get_feature_flag(feature)
        if explicit is not None:
            return explicit
        minimum = config.get_feature_version(feature)
        if minimum is None or version is None:
            return False
        return bool(version >= minimum)

    def list_available_features(self) -> "list[str]":
        """List available feature flags for this dialect, sorted alphabetically."""
        config = self.get_dialect_config()
        return sorted(set(config.feature_flags) | set(config.feature_versions))
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
@mypyc_attr(allow_interpreted_subclasses=True)
class DataDictionaryMixin:
    """Mixin providing common data dictionary functionality.

    Includes version caching to avoid repeated database queries when checking
    feature flags or optimal types.
    """

    __slots__ = ("_version_cache", "_version_fetch_attempted")

    # Detected versions keyed by driver id(); only successful detections are stored.
    _version_cache: "dict[int, VersionInfo | None]"
    # Driver ids for which detection was attempted (even if it failed),
    # so a failed detection is not retried.
    _version_fetch_attempted: "set[int]"

    def __init__(self) -> None:
        self._version_cache = {}
        self._version_fetch_attempted = set()

    def get_cached_version(self, driver_id: int) -> "VersionCacheResult":
        """Get cached version info for a driver.

        Args:
            driver_id: The id() of the driver instance.

        Returns:
            Tuple of (was_cached, version_info). If was_cached is False,
            the caller should fetch the version and call cache_version().

        """
        if driver_id in self._version_fetch_attempted:
            # was_cached=True with version=None means detection was tried and failed.
            return True, self._version_cache.get(driver_id)
        return False, None

    def cache_version(self, driver_id: int, version: "VersionInfo | None") -> None:
        """Cache version info for a driver.

        Args:
            driver_id: The id() of the driver instance.
            version: The version info to cache (can be None if detection failed).

        """
        self._version_fetch_attempted.add(driver_id)
        if version is not None:
            self._version_cache[driver_id] = version

    def get_cached_version_for_driver(self, driver: Any) -> "VersionCacheResult":
        """Get cached version info for a driver instance.

        Args:
            driver: Database driver instance.

        Returns:
            Tuple of (was_cached, version_info).

        """
        return self.get_cached_version(id(driver))

    def cache_version_for_driver(self, driver: Any, version: "VersionInfo | None") -> None:
        """Cache version info for a driver instance.

        Args:
            driver: Database driver instance.
            version: Parsed version info or None.

        """
        self.cache_version(id(driver), version)

    def parse_version_string(self, version_str: str) -> "VersionInfo | None":
        """Parse version string into VersionInfo.

        Args:
            version_str: Raw version string from database

        Returns:
            VersionInfo instance or None if parsing fails

        """
        # Most specific pattern first: major.minor.patch, then major.minor, then major.
        patterns = [r"(\d+)\.(\d+)\.(\d+)", r"(\d+)\.(\d+)", r"(\d+)"]

        for pattern in patterns:
            match = re.search(pattern, version_str)
            if match:
                groups = match.groups()

                major = int(groups[0])
                # Missing components default to 0.
                minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR else 0
                patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH else 0
                return VersionInfo(major, minor, patch)

        return None

    def parse_version_with_pattern(self, pattern: "re.Pattern[str]", version_str: str) -> "VersionInfo | None":
        """Parse version string using a specific regex pattern.

        Args:
            pattern: Compiled regex pattern for the version format
            version_str: Raw version string from database

        Returns:
            VersionInfo instance or None if parsing fails

        """
        match = pattern.search(version_str)
        if not match:
            return None

        groups = match.groups()
        if not groups:
            return None

        major = int(groups[0])
        # Optional groups may match as None/empty; treat those as 0.
        minor = int(groups[1]) if len(groups) > VERSION_GROUPS_MIN_FOR_MINOR and groups[1] else 0
        patch = int(groups[2]) if len(groups) > VERSION_GROUPS_MIN_FOR_PATCH and groups[2] else 0
        return VersionInfo(major, minor, patch)

    def _resolve_log_adapter(self) -> str:
        """Resolve adapter identifier for logging."""
        # Prefer the dialect name when the subclass defines one.
        if hasattr(self, "dialect"):
            return str(self.dialect)  # pyright: ignore[reportAttributeAccessIssue]
        return type(self).__name__

    def _log_version_detected(self, adapter: str, version: VersionInfo) -> None:
        """Log detected database version with db.system context."""

        logger.debug(
            "Detected database version", extra={"db.system": resolve_db_system(adapter), "db.version": str(version)}
        )

    def _log_version_unavailable(self, adapter: str, reason: str) -> None:
        """Log that database version could not be determined."""

        logger.debug("Database version unavailable", extra={"db.system": resolve_db_system(adapter), "reason": reason})

    def _log_schema_introspect(
        self, driver: Any, *, schema_name: "str | None", table_name: "str | None", operation: str
    ) -> None:
        """Log schema-level introspection activity."""
        log_with_context(
            logger,
            logging.DEBUG,
            "schema.introspect",
            db_system=resolve_db_system(type(driver).__name__),
            schema_name=schema_name,
            table_name=table_name,
            operation=operation,
        )

    def _log_table_describe(self, driver: Any, *, schema_name: "str | None", table_name: str, operation: str) -> None:
        """Log table-level introspection activity."""
        log_with_context(
            logger,
            logging.DEBUG,
            "table.describe",
            db_system=resolve_db_system(type(driver).__name__),
            schema_name=schema_name,
            table_name=table_name,
            operation=operation,
        )

    def detect_version_with_queries(self, driver: "HasExecuteProtocol", queries: "list[str]") -> "VersionInfo | None":
        """Try multiple version queries to detect database version.

        Args:
            driver: Database driver with execute support
            queries: List of SQL queries to try

        Returns:
            Version information or None if detection fails

        """
        for query in queries:
            # Any failure (unsupported query, connection hiccup) falls through
            # to the next candidate query.
            with suppress(Exception):
                result: HasDataProtocol = driver.execute(query)
                result_data = result.data
                if result_data:
                    first_row = result_data[0]
                    # Extract the first value regardless of row shape
                    # (mapping, sequence, or scalar).
                    version_str = str(first_row)
                    if isinstance(first_row, dict):
                        version_str = str(next(iter(first_row.values())))
                    elif isinstance(first_row, (list, tuple)):
                        version_str = str(first_row[0])

                    parsed_version = self.parse_version_string(version_str)
                    if parsed_version:
                        self._log_version_detected(self._resolve_log_adapter(), parsed_version)
                        return parsed_version

        self._log_version_unavailable(self._resolve_log_adapter(), "queries_exhausted")
        return None

    def get_default_type_mapping(self) -> "dict[str, str]":
        """Get default type mappings for common categories.

        Returns:
            Dictionary mapping type categories to generic SQL types

        """
        return {
            "json": "TEXT",
            "uuid": "VARCHAR(36)",
            "boolean": "INTEGER",
            "timestamp": "TIMESTAMP",
            "text": "TEXT",
            "blob": "BLOB",
        }

    def get_default_features(self) -> "list[str]":
        """Get default feature flags supported by most databases.

        Returns:
            List of commonly supported feature names

        """
        return ["supports_transactions", "supports_prepared_statements"]

    def sort_tables_topologically(self, tables: "list[str]", foreign_keys: "list[ForeignKeyMetadata]") -> "list[str]":
        """Sort tables topologically based on foreign key dependencies using Python.

        Args:
            tables: List of table names.
            foreign_keys: List of foreign key metadata.

        Returns:
            List of table names in topological order (dependencies first).

        Notes:
            Self-referencing foreign keys are ignored to avoid simple cycles, and every dependency is added with the referencing table depending on its referenced table.
            NOTE(review): cross-table FK cycles would raise graphlib.CycleError from static_order() — confirm callers expect that.

        """
        sorter: graphlib.TopologicalSorter[str] = graphlib.TopologicalSorter()
        for table in tables:
            sorter.add(table)

        for fk in foreign_keys:
            if fk.table_name == fk.referenced_table:
                continue
            # Referencing table depends on the referenced table.
            sorter.add(fk.table_name, fk.referenced_table)

        return list(sorter.static_order())
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
class ScriptExecutionResult(NamedTuple):
    """Result from script execution with statement count information."""

    # Raw result returned by the database cursor/driver.
    cursor_result: Any
    # Optional override for the number of affected rows.
    rowcount_override: int | None
    # Special metadata or additional driver information.
    special_data: Any
    # Total number of statements in the script.
    statement_count: int
    # Number of statements that executed successfully.
    successful_statements: int
|
|
696
|
+
|
|
697
|
+
|
|
698
|
+
class ExecutionResult(NamedTuple):
    """Execution result containing all data needed for SQLResult building."""

    # Raw result returned by the database cursor/driver.
    cursor_result: Any
    # Optional override for the number of affected rows.
    rowcount_override: int | None
    # Special metadata or additional driver information.
    special_data: Any
    # For SELECT operations: extracted row data.
    selected_data: "list[dict[str, Any]] | None"
    # For SELECT operations: the column names.
    column_names: "list[str] | None"
    # For SELECT operations: number of rows returned.
    data_row_count: int | None
    # For script operations: total number of statements.
    statement_count: int | None
    # For script operations: number of successful statements.
    successful_statements: int | None
    # Whether this result came from script execution.
    is_script_result: bool
    # Whether this result came from a SELECT operation.
    is_select_result: bool
    # Whether this result came from an execute_many operation.
    is_many_result: bool
    # ID of the last inserted row, when the driver reports one.
    last_inserted_id: int | str | None = None
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
# Positional indices for the (cursor_result, rowcount_override, special_data)
# execution triple, matching the layout of DEFAULT_EXECUTION_RESULT below.
EXEC_CURSOR_RESULT: Final[int] = 0
EXEC_ROWCOUNT_OVERRIDE: Final[int] = 1
EXEC_SPECIAL_DATA: Final[int] = 2
# All-None triple used before/without an actual execution result.
DEFAULT_EXECUTION_RESULT: Final["tuple[object | None, int | None, object | None]"] = (None, None, None)
|
|
719
|
+
|
|
720
|
+
|
|
721
|
+
@mypyc_attr(allow_interpreted_subclasses=True)
class CommonDriverAttributesMixin:
    """Common attributes and methods for driver adapters."""

    __slots__ = ("_observability", "connection", "driver_features", "statement_config")
    # Database connection instance.
    connection: "Any"
    # Statement configuration for the driver.
    statement_config: "StatementConfig"
    # Driver-specific features (e.g. extensions, secrets, connection callbacks,
    # "stack_native_disabled", "storage_capabilities").
    driver_features: "dict[str, Any]"
|
|
729
|
+
|
|
730
|
+
    def __init__(
        self,
        connection: "Any",
        statement_config: "StatementConfig",
        driver_features: "dict[str, Any] | None" = None,
        observability: "ObservabilityRuntime | None" = None,
    ) -> None:
        """Initialize driver adapter with connection and configuration.

        Args:
            connection: Database connection instance
            statement_config: Statement configuration for the driver
            driver_features: Driver-specific features like extensions, secrets, and connection callbacks
            observability: Optional runtime handling lifecycle hooks, observers, and spans

        """
        self.connection = connection
        self.statement_config = statement_config
        # Normalize a missing feature mapping to {} so lookups never need a None check.
        self.driver_features = driver_features or {}
        # May remain None; the `observability` property lazily creates a runtime.
        self._observability = observability
|
|
750
|
+
|
|
751
|
+
    def attach_observability(self, runtime: "ObservabilityRuntime") -> None:
        """Attach or replace the observability runtime.

        Args:
            runtime: Runtime that will handle spans, metrics, and lifecycle hooks.
        """
        self._observability = runtime
|
|
754
|
+
|
|
755
|
+
    @property
    def observability(self) -> "ObservabilityRuntime":
        """Return the observability runtime, creating a disabled instance when absent."""
        # Lazily materialize a default runtime so callers never receive None.
        if self._observability is None:
            self._observability = ObservabilityRuntime(config_name=type(self).__name__)
        return self._observability
|
|
761
|
+
|
|
762
|
+
    @property
    def is_async(self) -> bool:
        """Return whether the driver executes asynchronously.

        Returns:
            False for sync drivers.

        """
        # Async driver mixins are expected to override this with True.
        return False
|
|
771
|
+
|
|
772
|
+
    @property
    def stack_native_disabled(self) -> bool:
        """Return True when native stack execution is disabled for this driver."""
        # Controlled via the "stack_native_disabled" driver feature; absent means enabled.
        return bool(self.driver_features.get("stack_native_disabled", False))
|
|
776
|
+
|
|
777
|
+
storage_pipeline_factory: "ClassVar[type[SyncStoragePipeline | AsyncStoragePipeline] | None]" = None
|
|
778
|
+
|
|
779
|
+
def storage_capabilities(self) -> "StorageCapabilities":
|
|
780
|
+
"""Return cached storage capabilities for the active driver.
|
|
781
|
+
|
|
782
|
+
Returns:
|
|
783
|
+
StorageCapabilities dict with capability flags.
|
|
784
|
+
|
|
785
|
+
Raises:
|
|
786
|
+
StorageCapabilityError: If storage capabilities are not configured.
|
|
787
|
+
|
|
788
|
+
"""
|
|
789
|
+
capabilities = self.driver_features.get("storage_capabilities")
|
|
790
|
+
if capabilities is None:
|
|
791
|
+
msg = "Storage capabilities are not configured for this driver."
|
|
792
|
+
raise StorageCapabilityError(msg, capability="storage_capabilities")
|
|
793
|
+
return cast("StorageCapabilities", dict(capabilities))
|
|
794
|
+
|
|
795
|
+
def _require_capability(self, capability_flag: str) -> None:
|
|
796
|
+
"""Check that a storage capability is enabled.
|
|
797
|
+
|
|
798
|
+
Args:
|
|
799
|
+
capability_flag: The capability flag to check.
|
|
800
|
+
|
|
801
|
+
Raises:
|
|
802
|
+
StorageCapabilityError: If the capability is not available.
|
|
803
|
+
|
|
804
|
+
"""
|
|
805
|
+
capabilities = self.storage_capabilities()
|
|
806
|
+
if capabilities.get(capability_flag, False):
|
|
807
|
+
return
|
|
808
|
+
human_label = CAPABILITY_HINTS.get(capability_flag, capability_flag)
|
|
809
|
+
remediation = "Check adapter supports this capability or stage artifacts via storage pipeline."
|
|
810
|
+
msg = f"{human_label} is not available for this adapter"
|
|
811
|
+
raise StorageCapabilityError(msg, capability=capability_flag, remediation=remediation)
|
|
812
|
+
|
|
813
|
+
def _raise_storage_not_implemented(self, capability: str) -> None:
|
|
814
|
+
"""Raise NotImplementedError for storage operations.
|
|
815
|
+
|
|
816
|
+
Args:
|
|
817
|
+
capability: The capability that is not implemented.
|
|
818
|
+
|
|
819
|
+
Raises:
|
|
820
|
+
StorageCapabilityError: Always raised.
|
|
821
|
+
|
|
822
|
+
"""
|
|
823
|
+
msg = f"{capability} is not implemented for this driver"
|
|
824
|
+
remediation = "Override storage methods on the adapter to enable this capability."
|
|
825
|
+
raise StorageCapabilityError(msg, capability=capability, remediation=remediation)
|
|
826
|
+
|
|
827
|
+
    # Overload stack: list-of-rows and single-row inputs map to list-of-schema
    # and single-schema results respectively; schema_type=None passes data through.
    @overload
    @staticmethod
    def to_schema(data: "list[dict[str, Any]]", *, schema_type: "type[SchemaT]") -> "list[SchemaT]": ...
    @overload
    @staticmethod
    def to_schema(data: "list[dict[str, Any]]", *, schema_type: None = None) -> "list[dict[str, Any]]": ...
    @overload
    @staticmethod
    def to_schema(data: "dict[str, Any]", *, schema_type: "type[SchemaT]") -> "SchemaT": ...
    @overload
    @staticmethod
    def to_schema(data: "dict[str, Any]", *, schema_type: None = None) -> "dict[str, Any]": ...
    @overload
    @staticmethod
    def to_schema(data: Any, *, schema_type: "type[SchemaT]") -> Any: ...
    @overload
    @staticmethod
    def to_schema(data: Any, *, schema_type: None = None) -> Any: ...

    @staticmethod
    def to_schema(data: Any, *, schema_type: "type[Any] | None" = None) -> Any:
        """Convert data to a specified schema type.

        Supports transformation to various schema types including:
        - TypedDict
        - dataclasses
        - msgspec Structs
        - Pydantic models
        - attrs classes

        Args:
            data: Input data to convert (dict, list of dicts, or other).
            schema_type: Target schema type for conversion. If None, returns data unchanged.

        Returns:
            Converted data in the specified schema type, or original data if schema_type is None.

        """
        # Thin wrapper: the conversion logic lives in sqlspec.utils.schema.to_schema.
        return _to_schema_impl(data, schema_type=schema_type)
|
|
867
|
+
|
|
868
|
+
def create_execution_result(
|
|
869
|
+
self,
|
|
870
|
+
cursor_result: Any,
|
|
871
|
+
*,
|
|
872
|
+
rowcount_override: int | None = None,
|
|
873
|
+
special_data: Any = None,
|
|
874
|
+
selected_data: "list[dict[str, Any]] | None" = None,
|
|
875
|
+
column_names: "list[str] | None" = None,
|
|
876
|
+
data_row_count: int | None = None,
|
|
877
|
+
statement_count: int | None = None,
|
|
878
|
+
successful_statements: int | None = None,
|
|
879
|
+
is_script_result: bool = False,
|
|
880
|
+
is_select_result: bool = False,
|
|
881
|
+
is_many_result: bool = False,
|
|
882
|
+
last_inserted_id: int | str | None = None,
|
|
883
|
+
) -> ExecutionResult:
|
|
884
|
+
"""Create ExecutionResult with all necessary data for any operation type.
|
|
885
|
+
|
|
886
|
+
Args:
|
|
887
|
+
cursor_result: The raw result returned by the database cursor/driver
|
|
888
|
+
rowcount_override: Optional override for the number of affected rows
|
|
889
|
+
special_data: Any special metadata or additional information
|
|
890
|
+
selected_data: For SELECT operations, the extracted row data
|
|
891
|
+
column_names: For SELECT operations, the column names
|
|
892
|
+
data_row_count: For SELECT operations, the number of rows returned
|
|
893
|
+
statement_count: For script operations, total number of statements
|
|
894
|
+
successful_statements: For script operations, number of successful statements
|
|
895
|
+
is_script_result: Whether this result is from script execution
|
|
896
|
+
is_select_result: Whether this result is from a SELECT operation
|
|
897
|
+
is_many_result: Whether this result is from an execute_many operation
|
|
898
|
+
last_inserted_id: The ID of the last inserted row (if applicable)
|
|
899
|
+
|
|
900
|
+
Returns:
|
|
901
|
+
ExecutionResult configured for the specified operation type
|
|
902
|
+
|
|
903
|
+
"""
|
|
904
|
+
return ExecutionResult(
|
|
905
|
+
cursor_result=cursor_result,
|
|
906
|
+
rowcount_override=rowcount_override,
|
|
907
|
+
special_data=special_data,
|
|
908
|
+
selected_data=selected_data,
|
|
909
|
+
column_names=column_names,
|
|
910
|
+
data_row_count=data_row_count,
|
|
911
|
+
statement_count=statement_count,
|
|
912
|
+
successful_statements=successful_statements,
|
|
913
|
+
is_script_result=is_script_result,
|
|
914
|
+
is_select_result=is_select_result,
|
|
915
|
+
is_many_result=is_many_result,
|
|
916
|
+
last_inserted_id=last_inserted_id,
|
|
917
|
+
)
|
|
918
|
+
|
|
919
|
+
def build_statement_result(self, statement: "SQL", execution_result: ExecutionResult) -> "SQLResult":
|
|
920
|
+
"""Build and return the SQLResult from ExecutionResult data.
|
|
921
|
+
|
|
922
|
+
Args:
|
|
923
|
+
statement: SQL statement that was executed
|
|
924
|
+
execution_result: ExecutionResult containing all necessary data
|
|
925
|
+
|
|
926
|
+
Returns:
|
|
927
|
+
SQLResult with complete execution data
|
|
928
|
+
|
|
929
|
+
"""
|
|
930
|
+
if execution_result.is_script_result:
|
|
931
|
+
return SQLResult(
|
|
932
|
+
statement=statement,
|
|
933
|
+
data=[],
|
|
934
|
+
rows_affected=execution_result.rowcount_override or 0,
|
|
935
|
+
operation_type="SCRIPT",
|
|
936
|
+
total_statements=execution_result.statement_count or 0,
|
|
937
|
+
successful_statements=execution_result.successful_statements or 0,
|
|
938
|
+
metadata=execution_result.special_data or {"status_message": "OK"},
|
|
939
|
+
)
|
|
940
|
+
|
|
941
|
+
if execution_result.is_select_result:
|
|
942
|
+
return SQLResult(
|
|
943
|
+
statement=statement,
|
|
944
|
+
data=execution_result.selected_data or [],
|
|
945
|
+
column_names=execution_result.column_names or [],
|
|
946
|
+
rows_affected=execution_result.data_row_count or 0,
|
|
947
|
+
operation_type="SELECT",
|
|
948
|
+
metadata=execution_result.special_data or {},
|
|
949
|
+
)
|
|
950
|
+
|
|
951
|
+
return SQLResult(
|
|
952
|
+
statement=statement,
|
|
953
|
+
data=[],
|
|
954
|
+
rows_affected=execution_result.rowcount_override or 0,
|
|
955
|
+
operation_type=statement.operation_type,
|
|
956
|
+
last_inserted_id=execution_result.last_inserted_id,
|
|
957
|
+
metadata=execution_result.special_data or {"status_message": "OK"},
|
|
958
|
+
)
|
|
959
|
+
|
|
960
|
+
def _should_force_select(self, statement: "SQL", cursor: object) -> bool:
|
|
961
|
+
"""Determine if a statement with unknown type should be treated as SELECT.
|
|
962
|
+
|
|
963
|
+
Uses driver metadata (statement_type, description/schema) as a safety net when
|
|
964
|
+
the compiler cannot classify the operation. This remains conservative by only
|
|
965
|
+
triggering when the operation type is "UNKNOWN".
|
|
966
|
+
|
|
967
|
+
Args:
|
|
968
|
+
statement: SQL statement being executed.
|
|
969
|
+
cursor: Database cursor/job object that may expose metadata.
|
|
970
|
+
|
|
971
|
+
Returns:
|
|
972
|
+
True when cursor metadata indicates a row-returning operation despite an
|
|
973
|
+
unknown operation type; otherwise False.
|
|
974
|
+
|
|
975
|
+
"""
|
|
976
|
+
if statement.operation_type != "UNKNOWN":
|
|
977
|
+
return False
|
|
978
|
+
|
|
979
|
+
if has_statement_type(cursor) and isinstance(cursor.statement_type, str):
|
|
980
|
+
statement_type = cursor.statement_type
|
|
981
|
+
else:
|
|
982
|
+
statement_type = None
|
|
983
|
+
if isinstance(statement_type, str) and statement_type.upper() == "SELECT":
|
|
984
|
+
return True
|
|
985
|
+
|
|
986
|
+
if has_cursor_metadata(cursor):
|
|
987
|
+
return bool(cursor.description)
|
|
988
|
+
return False
|
|
989
|
+
|
|
990
|
+
def prepare_statement(
|
|
991
|
+
self,
|
|
992
|
+
statement: "Statement | QueryBuilder",
|
|
993
|
+
parameters: "tuple[StatementParameters | StatementFilter, ...]" = (),
|
|
994
|
+
*,
|
|
995
|
+
statement_config: "StatementConfig | None" = None,
|
|
996
|
+
kwargs: "dict[str, Any] | None" = None,
|
|
997
|
+
) -> "SQL":
|
|
998
|
+
"""Build SQL statement from various input types.
|
|
999
|
+
|
|
1000
|
+
Ensures dialect is set and preserves existing state when rebuilding SQL objects.
|
|
1001
|
+
|
|
1002
|
+
Args:
|
|
1003
|
+
statement: SQL statement or QueryBuilder to prepare
|
|
1004
|
+
parameters: Parameters for the SQL statement
|
|
1005
|
+
statement_config: Optional statement configuration override.
|
|
1006
|
+
kwargs: Additional keyword arguments
|
|
1007
|
+
|
|
1008
|
+
Returns:
|
|
1009
|
+
Prepared SQL statement
|
|
1010
|
+
|
|
1011
|
+
"""
|
|
1012
|
+
if statement_config is None:
|
|
1013
|
+
statement_config = self.statement_config
|
|
1014
|
+
kwargs = kwargs or {}
|
|
1015
|
+
filters, data_parameters = self._split_parameters(parameters)
|
|
1016
|
+
|
|
1017
|
+
if isinstance(statement, QueryBuilder):
|
|
1018
|
+
sql_statement = self._prepare_from_builder(statement, data_parameters, statement_config, kwargs)
|
|
1019
|
+
elif isinstance(statement, SQL):
|
|
1020
|
+
sql_statement = self._prepare_from_sql(statement, data_parameters, statement_config, kwargs)
|
|
1021
|
+
else:
|
|
1022
|
+
sql_statement = self._prepare_from_string(statement, data_parameters, statement_config, kwargs)
|
|
1023
|
+
|
|
1024
|
+
return self._apply_filters(sql_statement, filters)
|
|
1025
|
+
|
|
1026
|
+
def _split_parameters(
|
|
1027
|
+
self, parameters: "tuple[StatementParameters | StatementFilter, ...]"
|
|
1028
|
+
) -> "tuple[list[StatementFilter], list[StatementParameters]]":
|
|
1029
|
+
filters: list[StatementFilter] = []
|
|
1030
|
+
data_parameters: list[StatementParameters] = []
|
|
1031
|
+
for param in parameters:
|
|
1032
|
+
if is_statement_filter(param):
|
|
1033
|
+
filters.append(param)
|
|
1034
|
+
else:
|
|
1035
|
+
data_parameters.append(param)
|
|
1036
|
+
return filters, data_parameters
|
|
1037
|
+
|
|
1038
|
+
def _prepare_from_builder(
|
|
1039
|
+
self,
|
|
1040
|
+
builder: "QueryBuilder",
|
|
1041
|
+
data_parameters: "list[StatementParameters]",
|
|
1042
|
+
statement_config: "StatementConfig",
|
|
1043
|
+
kwargs: "dict[str, Any]",
|
|
1044
|
+
) -> "SQL":
|
|
1045
|
+
sql_statement = builder.to_statement(statement_config)
|
|
1046
|
+
if data_parameters or kwargs:
|
|
1047
|
+
merged_parameters = (
|
|
1048
|
+
(*sql_statement.positional_parameters, *tuple(data_parameters))
|
|
1049
|
+
if data_parameters
|
|
1050
|
+
else sql_statement.positional_parameters
|
|
1051
|
+
)
|
|
1052
|
+
statement_seed = sql_statement.raw_expression or sql_statement.raw_sql
|
|
1053
|
+
return SQL(statement_seed, *merged_parameters, statement_config=statement_config, **kwargs)
|
|
1054
|
+
return sql_statement
|
|
1055
|
+
|
|
1056
|
+
    def _prepare_from_sql(
        self,
        sql_statement: "SQL",
        data_parameters: "list[StatementParameters]",
        statement_config: "StatementConfig",
        kwargs: "dict[str, Any]",
    ) -> "SQL":
        """Prepare an existing SQL object for execution under ``statement_config``.

        If extra data parameters or kwargs were supplied, a new SQL object is
        built from the raw statement seed with merged parameters. Otherwise the
        statement is rebuilt only when its own config disagrees with
        ``statement_config`` on dialect or default execution parameter style;
        when neither differs, the original object is returned untouched.
        """
        if data_parameters or kwargs:
            # New positional parameters are appended after any already bound.
            merged_parameters = (
                (*sql_statement.positional_parameters, *tuple(data_parameters))
                if data_parameters
                else sql_statement.positional_parameters
            )
            statement_seed = sql_statement.raw_expression or sql_statement.raw_sql
            return SQL(statement_seed, *merged_parameters, statement_config=statement_config, **kwargs)

        needs_rebuild = False
        # Rebuild when the target config has a dialect the statement lacks or contradicts.
        if statement_config.dialect and (
            not sql_statement.statement_config.dialect
            or sql_statement.statement_config.dialect != statement_config.dialect
        ):
            needs_rebuild = True

        # Rebuild when the default execution parameter style differs between configs.
        if (
            sql_statement.statement_config.parameter_config.default_execution_parameter_style
            != statement_config.parameter_config.default_execution_parameter_style
        ):
            needs_rebuild = True

        if needs_rebuild:
            # Prefer the original AST/raw SQL seed; fall back to the rendered SQL.
            statement_seed = sql_statement.raw_expression or sql_statement.raw_sql or sql_statement.sql
            if sql_statement.is_many and sql_statement.parameters:
                # execute_many: pass the parameter-set sequence through as one argument.
                return SQL(statement_seed, sql_statement.parameters, statement_config=statement_config, is_many=True)
            if sql_statement.named_parameters:
                return SQL(statement_seed, statement_config=statement_config, **sql_statement.named_parameters)
            return SQL(statement_seed, *sql_statement.positional_parameters, statement_config=statement_config)
        return sql_statement
|
|
1093
|
+
|
|
1094
|
+
def _prepare_from_string(
|
|
1095
|
+
self,
|
|
1096
|
+
statement: "Statement",
|
|
1097
|
+
data_parameters: "list[StatementParameters]",
|
|
1098
|
+
statement_config: "StatementConfig",
|
|
1099
|
+
kwargs: "dict[str, Any]",
|
|
1100
|
+
) -> "SQL":
|
|
1101
|
+
return SQL(statement, *tuple(data_parameters), statement_config=statement_config, **kwargs)
|
|
1102
|
+
|
|
1103
|
+
def _apply_filters(self, sql_statement: "SQL", filters: "list[StatementFilter]") -> "SQL":
|
|
1104
|
+
for filter_obj in filters:
|
|
1105
|
+
sql_statement = filter_obj.append_to_statement(sql_statement)
|
|
1106
|
+
return sql_statement
|
|
1107
|
+
|
|
1108
|
+
def split_script_statements(
|
|
1109
|
+
self, script: str, statement_config: "StatementConfig", strip_trailing_semicolon: bool = False
|
|
1110
|
+
) -> "list[str]":
|
|
1111
|
+
"""Split a SQL script into individual statements.
|
|
1112
|
+
|
|
1113
|
+
Uses a lexer-driven state machine to handle multi-statement scripts,
|
|
1114
|
+
including complex constructs like PL/SQL blocks, T-SQL batches, and nested blocks.
|
|
1115
|
+
|
|
1116
|
+
Args:
|
|
1117
|
+
script: The SQL script to split
|
|
1118
|
+
statement_config: Statement configuration containing dialect information
|
|
1119
|
+
strip_trailing_semicolon: If True, remove trailing semicolons from statements
|
|
1120
|
+
|
|
1121
|
+
Returns:
|
|
1122
|
+
A list of individual SQL statements
|
|
1123
|
+
|
|
1124
|
+
"""
|
|
1125
|
+
return [
|
|
1126
|
+
sql_script.strip()
|
|
1127
|
+
for sql_script in split_sql_script(
|
|
1128
|
+
script, dialect=str(statement_config.dialect), strip_trailing_terminator=strip_trailing_semicolon
|
|
1129
|
+
)
|
|
1130
|
+
if sql_script.strip()
|
|
1131
|
+
]
|
|
1132
|
+
|
|
1133
|
+
def prepare_driver_parameters(
|
|
1134
|
+
self,
|
|
1135
|
+
parameters: "StatementParameters | list[StatementParameters] | tuple[StatementParameters, ...]",
|
|
1136
|
+
statement_config: "StatementConfig",
|
|
1137
|
+
is_many: bool = False,
|
|
1138
|
+
prepared_statement: Any | None = None, # pyright: ignore[reportUnusedParameter]
|
|
1139
|
+
) -> "ConvertedParameters":
|
|
1140
|
+
"""Prepare parameters for database driver consumption.
|
|
1141
|
+
|
|
1142
|
+
Normalizes parameter structure and unwraps TypedParameter objects
|
|
1143
|
+
to their underlying values, which database drivers expect.
|
|
1144
|
+
|
|
1145
|
+
Args:
|
|
1146
|
+
parameters: Parameters in any format (dict, list, tuple, scalar, TypedParameter)
|
|
1147
|
+
statement_config: Statement configuration for parameter style detection
|
|
1148
|
+
is_many: If True, handle as executemany parameter sequence
|
|
1149
|
+
prepared_statement: Optional prepared statement containing metadata for parameter processing
|
|
1150
|
+
|
|
1151
|
+
Returns:
|
|
1152
|
+
Parameters with TypedParameter objects unwrapped to primitive values
|
|
1153
|
+
|
|
1154
|
+
"""
|
|
1155
|
+
if parameters is None and statement_config.parameter_config.needs_static_script_compilation:
|
|
1156
|
+
return None
|
|
1157
|
+
|
|
1158
|
+
if not parameters:
|
|
1159
|
+
return []
|
|
1160
|
+
|
|
1161
|
+
if is_many:
|
|
1162
|
+
if isinstance(parameters, list):
|
|
1163
|
+
return [self._format_parameter_set_for_many(param_set, statement_config) for param_set in parameters]
|
|
1164
|
+
return [self._format_parameter_set_for_many(parameters, statement_config)]
|
|
1165
|
+
return self._format_parameter_set(parameters, statement_config)
|
|
1166
|
+
|
|
1167
|
+
def _apply_coercion(self, value: object, type_coercion_map: "dict[type, Callable[[Any], Any]] | None") -> object:
|
|
1168
|
+
"""Apply type coercion to a single value.
|
|
1169
|
+
|
|
1170
|
+
Args:
|
|
1171
|
+
value: Value to coerce (may be TypedParameter or raw value)
|
|
1172
|
+
type_coercion_map: Optional type coercion map
|
|
1173
|
+
|
|
1174
|
+
Returns:
|
|
1175
|
+
Coerced value with TypedParameter unwrapped
|
|
1176
|
+
|
|
1177
|
+
"""
|
|
1178
|
+
unwrapped_value = value.value if isinstance(value, TypedParameter) else value
|
|
1179
|
+
if type_coercion_map:
|
|
1180
|
+
for type_check, converter in type_coercion_map.items():
|
|
1181
|
+
if isinstance(unwrapped_value, type_check):
|
|
1182
|
+
return converter(unwrapped_value)
|
|
1183
|
+
return unwrapped_value
|
|
1184
|
+
|
|
1185
|
+
def _format_parameter_set_for_many(
|
|
1186
|
+
self, parameters: "StatementParameters", statement_config: "StatementConfig"
|
|
1187
|
+
) -> "ConvertedParameters":
|
|
1188
|
+
"""Prepare a single parameter set for execute_many operations.
|
|
1189
|
+
|
|
1190
|
+
Handles parameter sets without converting the structure to array format,
|
|
1191
|
+
applying type coercion to individual values while preserving structure.
|
|
1192
|
+
|
|
1193
|
+
Args:
|
|
1194
|
+
parameters: Single parameter set (tuple, list, or dict)
|
|
1195
|
+
statement_config: Statement configuration for parameter style detection
|
|
1196
|
+
|
|
1197
|
+
Returns:
|
|
1198
|
+
Processed parameter set with individual values coerced but structure preserved
|
|
1199
|
+
|
|
1200
|
+
"""
|
|
1201
|
+
if not parameters:
|
|
1202
|
+
return []
|
|
1203
|
+
|
|
1204
|
+
type_coercion_map = statement_config.parameter_config.type_coercion_map
|
|
1205
|
+
coerce_value = self._apply_coercion
|
|
1206
|
+
|
|
1207
|
+
if not isinstance(parameters, (dict, list, tuple)):
|
|
1208
|
+
return [coerce_value(parameters, type_coercion_map)]
|
|
1209
|
+
|
|
1210
|
+
if isinstance(parameters, dict):
|
|
1211
|
+
return {k: coerce_value(v, type_coercion_map) for k, v in parameters.items()}
|
|
1212
|
+
|
|
1213
|
+
coerced_params = [coerce_value(p, type_coercion_map) for p in parameters]
|
|
1214
|
+
return tuple(coerced_params) if isinstance(parameters, tuple) else coerced_params
|
|
1215
|
+
|
|
1216
|
+
    def _format_parameter_set(
        self, parameters: "StatementParameters", statement_config: "StatementConfig"
    ) -> "ConvertedParameters":
        """Prepare a single parameter set for database driver consumption.

        Args:
            parameters: Single parameter set in any format
            statement_config: Statement configuration for parameter style detection

        Returns:
            Processed parameter set with TypedParameter objects unwrapped and type coercion applied

        """
        if not parameters:
            return []

        type_coercion_map = statement_config.parameter_config.type_coercion_map
        coerce_value = self._apply_coercion

        # Scalar (non-container) parameter: wrap in a single-element list.
        if not isinstance(parameters, (dict, list, tuple)):
            return [coerce_value(parameters, type_coercion_map)]

        if isinstance(parameters, dict):
            # Named execution styles keep the dict shape so the driver binds by name.
            if statement_config.parameter_config.supported_execution_parameter_styles and (
                ParameterStyle.NAMED_PYFORMAT in statement_config.parameter_config.supported_execution_parameter_styles
                or ParameterStyle.NAMED_COLON in statement_config.parameter_config.supported_execution_parameter_styles
            ):
                return {k: coerce_value(v, type_coercion_map) for k, v in parameters.items()}
            # Positional default styles: flatten the dict to an ordered value list.
            if statement_config.parameter_config.default_parameter_style in {
                ParameterStyle.NUMERIC,
                ParameterStyle.QMARK,
                ParameterStyle.POSITIONAL_PYFORMAT,
            }:
                sorted_items = sorted(parameters.items(), key=_parameter_sort_key)
                return [coerce_value(value, type_coercion_map) for _, value in sorted_items]

            # Neither rule applies: preserve the dict with coerced values.
            return {k: coerce_value(v, type_coercion_map) for k, v in parameters.items()}

        coerced_params = [coerce_value(p, type_coercion_map) for p in parameters]
        # Tuple shape is preserved only when the config explicitly requests it.
        if statement_config.parameter_config.preserve_parameter_format and isinstance(parameters, tuple):
            return tuple(coerced_params)
        return coerced_params
|
|
1258
|
+
|
|
1259
|
+
def _get_compiled_sql(
|
|
1260
|
+
self, statement: "SQL", statement_config: "StatementConfig", flatten_single_parameters: bool = False
|
|
1261
|
+
) -> "tuple[str, object]":
|
|
1262
|
+
"""Get compiled SQL with parameter style conversion and caching.
|
|
1263
|
+
|
|
1264
|
+
Compiles the SQL statement and applies parameter style conversion.
|
|
1265
|
+
Results are cached when caching is enabled.
|
|
1266
|
+
|
|
1267
|
+
Args:
|
|
1268
|
+
statement: SQL statement to compile
|
|
1269
|
+
statement_config: Statement configuration including parameter config and dialect
|
|
1270
|
+
flatten_single_parameters: If True, flatten single-element lists for scalar parameters
|
|
1271
|
+
|
|
1272
|
+
Returns:
|
|
1273
|
+
Tuple of (compiled_sql, parameters)
|
|
1274
|
+
|
|
1275
|
+
"""
|
|
1276
|
+
compiled_statement, prepared_parameters = self._get_compiled_statement(
|
|
1277
|
+
statement, statement_config, flatten_single_parameters=flatten_single_parameters
|
|
1278
|
+
)
|
|
1279
|
+
return compiled_statement.compiled_sql, prepared_parameters
|
|
1280
|
+
|
|
1281
|
+
    def _get_compiled_statement(
        self, statement: "SQL", statement_config: "StatementConfig", flatten_single_parameters: bool = False
    ) -> "tuple[CachedStatement, object]":
        """Compile SQL and return cached statement metadata plus prepared parameters.

        Consults the compiled-statement cache first (only when enabled both
        globally and on the statement config). On a miss, the statement is
        prepared, compiled, its parameters converted for the driver, and the
        result stored back into the cache before being returned.
        """
        cache_config = get_cache_config()
        dialect_key = str(statement.dialect) if statement.dialect else None
        cache_key = None
        cache = None
        if cache_config.compiled_cache_enabled and statement_config.enable_caching:
            cache_key = self._generate_compilation_cache_key(statement, statement_config, flatten_single_parameters)
            cache = get_cache()
            cached_result = cache.get_statement(cache_key, dialect_key)
            if cached_result is not None and isinstance(cached_result, CachedStatement):
                return cached_result, cached_result.parameters

        prepared_statement = self.prepare_statement(statement, statement_config=statement_config)
        compiled_sql, execution_parameters = prepared_statement.compile()

        prepared_parameters = self.prepare_driver_parameters(
            execution_parameters,
            statement_config,
            is_many=prepared_statement.is_many,
            prepared_statement=prepared_statement,
        )

        # Lists are frozen to tuples so the cached entry is immutable.
        cached_parameters = tuple(prepared_parameters) if isinstance(prepared_parameters, list) else prepared_parameters
        cached_statement = CachedStatement(
            compiled_sql=compiled_sql, parameters=cached_parameters, expression=prepared_statement.expression
        )

        # cache_key/cache are only non-None when caching was enabled above.
        if cache_key is not None and cache is not None:
            cache.put_statement(cache_key, cached_statement, dialect_key)

        return cached_statement, prepared_parameters
|
|
1315
|
+
|
|
1316
|
+
    def _generate_compilation_cache_key(
        self, statement: "SQL", config: "StatementConfig", flatten_single_parameters: bool
    ) -> str:
        """Generate cache key that includes all compilation context.

        Creates a deterministic cache key that includes all factors that affect SQL compilation,
        preventing cache contamination between different compilation contexts.

        Returns:
            A ``compiled:<statement-hash>:<context-hash>`` key string.
        """
        # Transformers participate in the context hash: configs with different
        # transformers must never share a cache slot.
        statement_transformers = (
            tuple(_callable_cache_key(transformer) for transformer in config.statement_transformers)
            if config.statement_transformers
            else ()
        )
        context_hash = hash((
            config.parameter_config.hash(),
            config.dialect,
            statement.is_script,
            statement.is_many,
            flatten_single_parameters,
            _callable_cache_key(config.output_transformer),
            statement_transformers,
            _callable_cache_key(config.parameter_config.output_transformer),
            _callable_cache_key(config.parameter_config.ast_transformer),
            bool(config.parameter_config.needs_static_script_compilation),
        ))

        params = statement.parameters

        # No parameters at all: the SQL text alone identifies the statement.
        if params is None or (isinstance(params, (list, tuple, dict)) and not params):
            return f"compiled:{hash(statement.sql)}:{context_hash}"

        # Fast path: a tuple of hashable primitives can be hashed directly.
        if isinstance(params, tuple) and all(isinstance(p, (int, str, bytes, bool, type(None))) for p in params):
            try:
                return (
                    f"compiled:{hash((statement.sql, params, statement.is_many, statement.is_script))}:{context_hash}"
                )
            except TypeError:
                # Unhashable despite the primitive check; fall through to fingerprinting.
                pass

        # General path: fingerprint arbitrary parameter structures.
        params_fingerprint = fingerprint_parameters(params)
        base_hash = hash((statement.sql, params_fingerprint, statement.is_many, statement.is_script))
        return f"compiled:{base_hash}:{context_hash}"
|
|
1358
|
+
|
|
1359
|
+
def _get_dominant_parameter_style(self, parameters: "list[Any]") -> "ParameterStyle | None":
|
|
1360
|
+
"""Determine the dominant parameter style from parameter info list.
|
|
1361
|
+
|
|
1362
|
+
Args:
|
|
1363
|
+
parameters: List of ParameterInfo objects from validator.extract_parameters()
|
|
1364
|
+
|
|
1365
|
+
Returns:
|
|
1366
|
+
The dominant parameter style, or None if no parameters
|
|
1367
|
+
|
|
1368
|
+
"""
|
|
1369
|
+
if not parameters:
|
|
1370
|
+
return None
|
|
1371
|
+
|
|
1372
|
+
style_counts: dict[ParameterStyle, int] = {}
|
|
1373
|
+
for param in parameters:
|
|
1374
|
+
style_counts[param.style] = style_counts.get(param.style, 0) + 1
|
|
1375
|
+
|
|
1376
|
+
precedence = {
|
|
1377
|
+
ParameterStyle.QMARK: 1,
|
|
1378
|
+
ParameterStyle.NUMERIC: 2,
|
|
1379
|
+
ParameterStyle.POSITIONAL_COLON: 3,
|
|
1380
|
+
ParameterStyle.POSITIONAL_PYFORMAT: 4,
|
|
1381
|
+
ParameterStyle.NAMED_AT: 5,
|
|
1382
|
+
ParameterStyle.NAMED_DOLLAR: 6,
|
|
1383
|
+
ParameterStyle.NAMED_COLON: 7,
|
|
1384
|
+
ParameterStyle.NAMED_PYFORMAT: 8,
|
|
1385
|
+
}
|
|
1386
|
+
|
|
1387
|
+
return _select_dominant_style(style_counts, precedence)
|
|
1388
|
+
|
|
1389
|
+
@staticmethod
|
|
1390
|
+
def find_filter(
|
|
1391
|
+
filter_type: "type[FilterTypeT]",
|
|
1392
|
+
filters: "Sequence[StatementFilter | StatementParameters] | Sequence[StatementFilter]",
|
|
1393
|
+
) -> "FilterTypeT | None":
|
|
1394
|
+
"""Get the filter specified by filter type from the filters.
|
|
1395
|
+
|
|
1396
|
+
Args:
|
|
1397
|
+
filter_type: The type of filter to find.
|
|
1398
|
+
filters: filter types to apply to the query
|
|
1399
|
+
|
|
1400
|
+
Returns:
|
|
1401
|
+
The match filter instance or None
|
|
1402
|
+
|
|
1403
|
+
"""
|
|
1404
|
+
for filter_ in filters:
|
|
1405
|
+
if isinstance(filter_, filter_type):
|
|
1406
|
+
return filter_
|
|
1407
|
+
return None
|
|
1408
|
+
|
|
1409
|
+
    def _create_count_query(self, original_sql: "SQL") -> "SQL":
        """Create a COUNT query from the original SQL statement.

        Transforms the original SELECT statement to count total rows while preserving
        WHERE, HAVING, and GROUP BY clauses but removing ORDER BY, LIMIT, and OFFSET.
        Copies any existing ``WITH`` clause (sqlglot stores it under ``with_``) and falls back to inferred tables if the FROM clause is missing.
        When GROUP BY, JOINs, or a WITH clause exist we wrap the payload in a subquery before counting.

        Raises:
            ImproperConfigurationError: If the SQL expression is empty, or a
                SELECT has no FROM clause and none can be inferred.
        """
        if not original_sql.expression:
            # Compiling populates the parsed expression as a side effect.
            original_sql.compile()

        if not original_sql.expression:
            msg = "Cannot create COUNT query from empty SQL expression"
            raise ImproperConfigurationError(msg)

        expr = original_sql.expression
        cte: exp.Expression | None = None
        if isinstance(expr, exp.Expression):  # pyright: ignore
            cte = expr.args.get("with_")
            if cte is not None:
                # Detach the CTE; it is re-attached to the COUNT query below.
                expr = expr.copy()
                expr.set("with_", None)

        if isinstance(expr, exp.Select):
            from_clause = expr.args.get("from")
            if from_clause is None:
                from_clause = expr.args.get("froms")
            if from_clause is None:
                # No explicit FROM: fall back to the first table referenced anywhere.
                tables = list(expr.find_all(exp.Table))
                if tables:
                    first_table = tables[0]
                    from_clause = exp.from_(first_table)
            if from_clause is None:
                msg = (
                    "Cannot create COUNT query: SELECT statement missing FROM clause. "
                    "COUNT queries require a FROM clause to determine which table to count rows from."
                )
                raise ImproperConfigurationError(msg)

            has_group = expr.args.get("group")
            has_joins = expr.args.get("joins")
            needs_subquery = has_group or has_joins or cte is not None
            if needs_subquery:
                # GROUP BY / JOIN / CTE change row multiplicity: count over a subquery
                # that retains WHERE/HAVING/GROUP BY but drops ordering and paging.
                subquery_expr = expr.copy()
                subquery_expr.set("order", None)
                subquery_expr.set("limit", None)
                subquery_expr.set("offset", None)
                subquery = subquery_expr.subquery(alias="grouped_data")
                count_expr = exp.select(exp.Count(this=exp.Star())).from_(subquery)
            else:
                # Simple SELECT: count directly over the source, re-applying filters.
                source_from = cast("exp.Expression", from_clause)
                count_expr = exp.select(exp.Count(this=exp.Star())).from_(source_from, copy=False)
                if expr.args.get("where"):
                    count_expr = count_expr.where(cast("exp.Expression", expr.args.get("where")), copy=False)
                if expr.args.get("having"):
                    count_expr = count_expr.having(cast("exp.Expression", expr.args.get("having")), copy=False)

            # ORDER/LIMIT/OFFSET never affect the total row count.
            count_expr.set("order", None)
            count_expr.set("limit", None)
            count_expr.set("offset", None)

            if cte is not None:
                count_expr.set("with_", cte.copy())
            return SQL(count_expr, *original_sql.positional_parameters, statement_config=original_sql.statement_config)

        # Non-SELECT expression: wrap the whole statement and count its rows.
        subquery = cast("exp.Select", expr).subquery(alias="total_query")
        count_expr = exp.select(exp.Count(this=exp.Star())).from_(subquery)
        if cte is not None:
            count_expr.set("with_", cte.copy())
        return SQL(count_expr, *original_sql.positional_parameters, statement_config=original_sql.statement_config)
|