sqlspec-0.36.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1121 @@
|
|
|
1
|
+
"""SQL result classes for query execution results.
|
|
2
|
+
|
|
3
|
+
This module provides result classes for handling SQL query execution results
|
|
4
|
+
including regular results and Apache Arrow format results.
|
|
5
|
+
|
|
6
|
+
Classes:
|
|
7
|
+
StatementResult: Abstract base class for SQL results.
|
|
8
|
+
SQLResult: Standard implementation for regular results.
|
|
9
|
+
ArrowResult: Apache Arrow format results for data interchange.
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
from abc import ABC, abstractmethod
|
|
13
|
+
from collections.abc import Iterable, Iterator
|
|
14
|
+
from typing import TYPE_CHECKING, Any, cast, overload
|
|
15
|
+
|
|
16
|
+
from mypy_extensions import mypyc_attr
|
|
17
|
+
from typing_extensions import TypeVar
|
|
18
|
+
|
|
19
|
+
from sqlspec.core.result._io import rows_to_pandas, rows_to_polars
|
|
20
|
+
from sqlspec.core.statement import SQL
|
|
21
|
+
from sqlspec.storage import (
|
|
22
|
+
AsyncStoragePipeline,
|
|
23
|
+
StorageDestination,
|
|
24
|
+
StorageFormat,
|
|
25
|
+
StorageTelemetry,
|
|
26
|
+
SyncStoragePipeline,
|
|
27
|
+
)
|
|
28
|
+
from sqlspec.utils.arrow_helpers import (
|
|
29
|
+
arrow_table_column_names,
|
|
30
|
+
arrow_table_num_columns,
|
|
31
|
+
arrow_table_num_rows,
|
|
32
|
+
arrow_table_to_pandas,
|
|
33
|
+
arrow_table_to_polars,
|
|
34
|
+
arrow_table_to_pylist,
|
|
35
|
+
arrow_table_to_return_format,
|
|
36
|
+
cast_arrow_table_schema,
|
|
37
|
+
convert_dict_to_arrow,
|
|
38
|
+
ensure_arrow_table,
|
|
39
|
+
)
|
|
40
|
+
from sqlspec.utils.schema import to_schema
|
|
41
|
+
|
|
42
|
+
if TYPE_CHECKING:
|
|
43
|
+
from sqlspec.core.compiler import OperationType
|
|
44
|
+
from sqlspec.typing import ArrowReturnFormat, ArrowTable, PandasDataFrame, PolarsDataFrame, SchemaT
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
__all__ = ("ArrowResult", "EmptyResult", "SQLResult", "StackResult", "StatementResult")
|
|
48
|
+
|
|
49
|
+
T = TypeVar("T")
|
|
50
|
+
_EMPTY_RESULT_STATEMENT = SQL("-- empty stack result --")
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
@mypyc_attr(allow_interpreted_subclasses=False)
|
|
54
|
+
class StatementResult(ABC, Iterable[Any]):
|
|
55
|
+
"""Abstract base class for SQL statement execution results.
|
|
56
|
+
|
|
57
|
+
Provides a common interface for handling different types of SQL operation
|
|
58
|
+
results. Subclasses implement specific behavior for SELECT, INSERT, UPDATE,
|
|
59
|
+
DELETE, and script operations.
|
|
60
|
+
|
|
61
|
+
Attributes:
|
|
62
|
+
statement: The original SQL statement that was executed.
|
|
63
|
+
data: The result data from the operation.
|
|
64
|
+
rows_affected: Number of rows affected by the operation.
|
|
65
|
+
last_inserted_id: Last inserted ID from INSERT operations.
|
|
66
|
+
execution_time: Time taken to execute the statement in seconds.
|
|
67
|
+
metadata: Additional metadata about the operation.
|
|
68
|
+
"""
|
|
69
|
+
|
|
70
|
+
__slots__ = ("data", "execution_time", "last_inserted_id", "metadata", "rows_affected", "statement")
|
|
71
|
+
|
|
72
|
+
def __init__(
|
|
73
|
+
self,
|
|
74
|
+
statement: "SQL",
|
|
75
|
+
data: Any = None,
|
|
76
|
+
rows_affected: int = 0,
|
|
77
|
+
last_inserted_id: int | str | None = None,
|
|
78
|
+
execution_time: float | None = None,
|
|
79
|
+
metadata: "dict[str, Any] | None" = None,
|
|
80
|
+
) -> None:
|
|
81
|
+
"""Initialize statement result.
|
|
82
|
+
|
|
83
|
+
Args:
|
|
84
|
+
statement: The original SQL statement that was executed.
|
|
85
|
+
data: The result data from the operation.
|
|
86
|
+
rows_affected: Number of rows affected by the operation.
|
|
87
|
+
last_inserted_id: Last inserted ID from the operation.
|
|
88
|
+
execution_time: Time taken to execute the statement in seconds.
|
|
89
|
+
metadata: Additional metadata about the operation.
|
|
90
|
+
"""
|
|
91
|
+
self.statement = statement
|
|
92
|
+
self.data = data
|
|
93
|
+
self.rows_affected = rows_affected
|
|
94
|
+
self.last_inserted_id = last_inserted_id
|
|
95
|
+
self.execution_time = execution_time
|
|
96
|
+
self.metadata = metadata if metadata is not None else {}
|
|
97
|
+
|
|
98
|
+
@abstractmethod
|
|
99
|
+
def __iter__(self) -> "Iterator[Any]":
|
|
100
|
+
"""Iterate over result rows."""
|
|
101
|
+
|
|
102
|
+
@abstractmethod
|
|
103
|
+
def is_success(self) -> bool:
|
|
104
|
+
"""Check if the operation was successful.
|
|
105
|
+
|
|
106
|
+
Returns:
|
|
107
|
+
True if the operation completed successfully, False otherwise.
|
|
108
|
+
"""
|
|
109
|
+
|
|
110
|
+
@abstractmethod
|
|
111
|
+
def get_data(self) -> "Any":
|
|
112
|
+
"""Get the processed data from the result.
|
|
113
|
+
|
|
114
|
+
Returns:
|
|
115
|
+
The processed result data in an appropriate format.
|
|
116
|
+
"""
|
|
117
|
+
|
|
118
|
+
def get_metadata(self, key: str, default: Any = None) -> Any:
|
|
119
|
+
"""Get metadata value by key.
|
|
120
|
+
|
|
121
|
+
Args:
|
|
122
|
+
key: The metadata key to retrieve.
|
|
123
|
+
default: Default value if key is not found.
|
|
124
|
+
|
|
125
|
+
Returns:
|
|
126
|
+
The metadata value or default.
|
|
127
|
+
"""
|
|
128
|
+
return self.metadata.get(key, default)
|
|
129
|
+
|
|
130
|
+
def set_metadata(self, key: str, value: Any) -> None:
|
|
131
|
+
"""Set metadata value by key.
|
|
132
|
+
|
|
133
|
+
Args:
|
|
134
|
+
key: The metadata key to set.
|
|
135
|
+
value: The value to set.
|
|
136
|
+
"""
|
|
137
|
+
self.metadata[key] = value
|
|
138
|
+
|
|
139
|
+
@property
|
|
140
|
+
def operation_type(self) -> "OperationType":
|
|
141
|
+
"""Get operation type from the statement.
|
|
142
|
+
|
|
143
|
+
Returns:
|
|
144
|
+
The type of SQL operation that produced this result.
|
|
145
|
+
"""
|
|
146
|
+
return self.statement.operation_type
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
@mypyc_attr(allow_interpreted_subclasses=False)
|
|
150
|
+
class SQLResult(StatementResult):
|
|
151
|
+
"""Result class for SQL operations that return rows or affect rows.
|
|
152
|
+
|
|
153
|
+
Handles SELECT, INSERT, UPDATE, DELETE operations. For DML operations with
|
|
154
|
+
RETURNING clauses, the returned data is stored in the data attribute.
|
|
155
|
+
The operation_type attribute indicates the nature of the operation.
|
|
156
|
+
|
|
157
|
+
For script execution, tracks multiple statement results and errors.
|
|
158
|
+
"""
|
|
159
|
+
|
|
160
|
+
__slots__ = (
|
|
161
|
+
"_operation_type",
|
|
162
|
+
"column_names",
|
|
163
|
+
"error",
|
|
164
|
+
"errors",
|
|
165
|
+
"has_more",
|
|
166
|
+
"inserted_ids",
|
|
167
|
+
"operation_index",
|
|
168
|
+
"parameters",
|
|
169
|
+
"statement_results",
|
|
170
|
+
"successful_statements",
|
|
171
|
+
"total_count",
|
|
172
|
+
"total_statements",
|
|
173
|
+
)
|
|
174
|
+
|
|
175
|
+
_operation_type: "OperationType"
|
|
176
|
+
|
|
177
|
+
def __init__(
|
|
178
|
+
self,
|
|
179
|
+
statement: "SQL",
|
|
180
|
+
data: "list[dict[str, Any]] | None" = None,
|
|
181
|
+
rows_affected: int = 0,
|
|
182
|
+
last_inserted_id: int | str | None = None,
|
|
183
|
+
execution_time: float | None = None,
|
|
184
|
+
metadata: "dict[str, Any] | None" = None,
|
|
185
|
+
error: Exception | None = None,
|
|
186
|
+
operation_type: "OperationType" = "SELECT",
|
|
187
|
+
operation_index: int | None = None,
|
|
188
|
+
parameters: Any | None = None,
|
|
189
|
+
column_names: "list[str] | None" = None,
|
|
190
|
+
total_count: int | None = None,
|
|
191
|
+
has_more: bool = False,
|
|
192
|
+
inserted_ids: "list[int | str] | None" = None,
|
|
193
|
+
statement_results: "list[SQLResult] | None" = None,
|
|
194
|
+
errors: "list[str] | None" = None,
|
|
195
|
+
total_statements: int = 0,
|
|
196
|
+
successful_statements: int = 0,
|
|
197
|
+
) -> None:
|
|
198
|
+
"""Initialize SQL result.
|
|
199
|
+
|
|
200
|
+
Args:
|
|
201
|
+
statement: The original SQL statement that was executed.
|
|
202
|
+
data: The result data from the operation.
|
|
203
|
+
rows_affected: Number of rows affected by the operation.
|
|
204
|
+
last_inserted_id: Last inserted ID from the operation.
|
|
205
|
+
execution_time: Time taken to execute the statement in seconds.
|
|
206
|
+
metadata: Additional metadata about the operation.
|
|
207
|
+
error: Exception that occurred during execution.
|
|
208
|
+
operation_type: Type of SQL operation performed.
|
|
209
|
+
operation_index: Index of operation in a script.
|
|
210
|
+
parameters: Parameters used for the query.
|
|
211
|
+
column_names: Names of columns in the result set.
|
|
212
|
+
total_count: Total number of rows in the complete result set.
|
|
213
|
+
has_more: Whether there are additional result pages available.
|
|
214
|
+
inserted_ids: List of IDs from INSERT operations.
|
|
215
|
+
statement_results: Results from individual statements in a script.
|
|
216
|
+
errors: List of error messages for script execution.
|
|
217
|
+
total_statements: Total number of statements in a script.
|
|
218
|
+
successful_statements: Count of successful statements in a script.
|
|
219
|
+
"""
|
|
220
|
+
super().__init__(
|
|
221
|
+
statement=statement,
|
|
222
|
+
data=data,
|
|
223
|
+
rows_affected=rows_affected,
|
|
224
|
+
last_inserted_id=last_inserted_id,
|
|
225
|
+
execution_time=execution_time,
|
|
226
|
+
metadata=metadata,
|
|
227
|
+
)
|
|
228
|
+
self.error = error
|
|
229
|
+
self._operation_type = operation_type
|
|
230
|
+
self.operation_index = operation_index
|
|
231
|
+
self.parameters = parameters
|
|
232
|
+
|
|
233
|
+
self.column_names = column_names or []
|
|
234
|
+
self.total_count = total_count
|
|
235
|
+
self.has_more = has_more
|
|
236
|
+
self.inserted_ids = inserted_ids or []
|
|
237
|
+
self.statement_results = statement_results or []
|
|
238
|
+
self.errors = errors or []
|
|
239
|
+
self.total_statements = total_statements
|
|
240
|
+
self.successful_statements = successful_statements
|
|
241
|
+
|
|
242
|
+
if not self.column_names and data and len(data) > 0:
|
|
243
|
+
self.column_names = list(data[0].keys())
|
|
244
|
+
if self.total_count is None:
|
|
245
|
+
self.total_count = len(data) if data is not None else 0
|
|
246
|
+
|
|
247
|
+
@property
|
|
248
|
+
def operation_type(self) -> "OperationType":
|
|
249
|
+
"""Get operation type for this result.
|
|
250
|
+
|
|
251
|
+
Returns:
|
|
252
|
+
The type of SQL operation that produced this result.
|
|
253
|
+
"""
|
|
254
|
+
return self._operation_type
|
|
255
|
+
|
|
256
|
+
def _get_rows(self) -> "list[dict[str, Any]]":
|
|
257
|
+
"""Get validated row data as list of dicts.
|
|
258
|
+
|
|
259
|
+
Returns:
|
|
260
|
+
List of row dictionaries, empty list if no data.
|
|
261
|
+
"""
|
|
262
|
+
if self.data is None:
|
|
263
|
+
return []
|
|
264
|
+
if not isinstance(self.data, list):
|
|
265
|
+
return []
|
|
266
|
+
return self.data
|
|
267
|
+
|
|
268
|
+
def get_metadata(self, key: str, default: Any = None) -> Any:
|
|
269
|
+
"""Get metadata value by key.
|
|
270
|
+
|
|
271
|
+
Args:
|
|
272
|
+
key: The metadata key to retrieve.
|
|
273
|
+
default: Default value if key is not found.
|
|
274
|
+
|
|
275
|
+
Returns:
|
|
276
|
+
The metadata value or default.
|
|
277
|
+
"""
|
|
278
|
+
return self.metadata.get(key, default)
|
|
279
|
+
|
|
280
|
+
def set_metadata(self, key: str, value: Any) -> None:
|
|
281
|
+
"""Set metadata value by key.
|
|
282
|
+
|
|
283
|
+
Args:
|
|
284
|
+
key: The metadata key to set.
|
|
285
|
+
value: The value to set.
|
|
286
|
+
"""
|
|
287
|
+
self.metadata[key] = value
|
|
288
|
+
|
|
289
|
+
def is_success(self) -> bool:
|
|
290
|
+
"""Check if the operation was successful.
|
|
291
|
+
|
|
292
|
+
Returns:
|
|
293
|
+
True if operation was successful, False otherwise.
|
|
294
|
+
"""
|
|
295
|
+
op_type = self.operation_type.upper()
|
|
296
|
+
|
|
297
|
+
if op_type == "SCRIPT" or self.statement_results:
|
|
298
|
+
return not self.errors and self.total_statements == self.successful_statements
|
|
299
|
+
|
|
300
|
+
if op_type == "SELECT":
|
|
301
|
+
return self.data is not None and self.rows_affected >= 0
|
|
302
|
+
|
|
303
|
+
if op_type in {"INSERT", "UPDATE", "DELETE", "EXECUTE"}:
|
|
304
|
+
return self.rows_affected >= 0
|
|
305
|
+
|
|
306
|
+
return False
|
|
307
|
+
|
|
308
|
+
@overload
|
|
309
|
+
def get_data(self, *, schema_type: "type[SchemaT]") -> "list[SchemaT]": ...
|
|
310
|
+
|
|
311
|
+
@overload
|
|
312
|
+
def get_data(self, *, schema_type: None = None) -> "list[dict[str, Any]]": ...
|
|
313
|
+
|
|
314
|
+
def get_data(self, *, schema_type: "type[SchemaT] | None" = None) -> "list[SchemaT] | list[dict[str, Any]]":
|
|
315
|
+
"""Get the data from the result.
|
|
316
|
+
|
|
317
|
+
For regular operations, returns the list of rows.
|
|
318
|
+
For script operations, returns a summary dictionary containing
|
|
319
|
+
execution statistics and results.
|
|
320
|
+
|
|
321
|
+
Args:
|
|
322
|
+
schema_type: Optional schema type to transform the data into.
|
|
323
|
+
Supports Pydantic models, dataclasses, msgspec structs, attrs classes, and TypedDict.
|
|
324
|
+
|
|
325
|
+
Returns:
|
|
326
|
+
List of result rows (optionally transformed to schema_type) or script summary.
|
|
327
|
+
"""
|
|
328
|
+
op_type_upper = self.operation_type.upper()
|
|
329
|
+
if op_type_upper == "SCRIPT":
|
|
330
|
+
failed_statements = self.total_statements - self.successful_statements
|
|
331
|
+
return [
|
|
332
|
+
{
|
|
333
|
+
"total_statements": self.total_statements,
|
|
334
|
+
"successful_statements": self.successful_statements,
|
|
335
|
+
"failed_statements": failed_statements,
|
|
336
|
+
"errors": self.errors,
|
|
337
|
+
"statement_results": self.statement_results,
|
|
338
|
+
"total_rows_affected": self.get_total_rows_affected(),
|
|
339
|
+
}
|
|
340
|
+
]
|
|
341
|
+
data = self._get_rows()
|
|
342
|
+
if schema_type:
|
|
343
|
+
return cast("list[SchemaT]", to_schema(data, schema_type=schema_type))
|
|
344
|
+
return data
|
|
345
|
+
|
|
346
|
+
def add_statement_result(self, result: "SQLResult") -> None:
|
|
347
|
+
"""Add a statement result to the script execution results.
|
|
348
|
+
|
|
349
|
+
Args:
|
|
350
|
+
result: Statement result to add.
|
|
351
|
+
"""
|
|
352
|
+
self.statement_results.append(result)
|
|
353
|
+
self.total_statements += 1
|
|
354
|
+
if result.is_success():
|
|
355
|
+
self.successful_statements += 1
|
|
356
|
+
|
|
357
|
+
def get_total_rows_affected(self) -> int:
|
|
358
|
+
"""Get the total number of rows affected across all statements.
|
|
359
|
+
|
|
360
|
+
Returns:
|
|
361
|
+
Total rows affected.
|
|
362
|
+
"""
|
|
363
|
+
if self.statement_results:
|
|
364
|
+
total = 0
|
|
365
|
+
for stmt in self.statement_results:
|
|
366
|
+
if stmt.rows_affected and stmt.rows_affected > 0:
|
|
367
|
+
total += stmt.rows_affected
|
|
368
|
+
return total
|
|
369
|
+
return self.rows_affected if self.rows_affected and self.rows_affected > 0 else 0
|
|
370
|
+
|
|
371
|
+
@property
|
|
372
|
+
def num_rows(self) -> int:
|
|
373
|
+
"""Get the number of rows affected (alias for get_total_rows_affected).
|
|
374
|
+
|
|
375
|
+
Returns:
|
|
376
|
+
Total rows affected.
|
|
377
|
+
"""
|
|
378
|
+
return self.get_total_rows_affected()
|
|
379
|
+
|
|
380
|
+
@property
|
|
381
|
+
def num_columns(self) -> int:
|
|
382
|
+
"""Get the number of columns in the result data.
|
|
383
|
+
|
|
384
|
+
Returns:
|
|
385
|
+
Number of columns.
|
|
386
|
+
"""
|
|
387
|
+
return len(self.column_names) if self.column_names else 0
|
|
388
|
+
|
|
389
|
+
@overload
|
|
390
|
+
def get_first(self, *, schema_type: "type[SchemaT]") -> "SchemaT | None": ...
|
|
391
|
+
|
|
392
|
+
@overload
|
|
393
|
+
def get_first(self, *, schema_type: None = None) -> "dict[str, Any] | None": ...
|
|
394
|
+
|
|
395
|
+
def get_first(self, *, schema_type: "type[SchemaT] | None" = None) -> "SchemaT | dict[str, Any] | None":
|
|
396
|
+
"""Get the first row from the result, if any.
|
|
397
|
+
|
|
398
|
+
Args:
|
|
399
|
+
schema_type: Optional schema type to transform the data into.
|
|
400
|
+
Supports Pydantic models, dataclasses, msgspec structs, attrs classes, and TypedDict.
|
|
401
|
+
|
|
402
|
+
Returns:
|
|
403
|
+
First row (optionally transformed to schema_type) or None if no data.
|
|
404
|
+
"""
|
|
405
|
+
rows = self._get_rows()
|
|
406
|
+
if not rows:
|
|
407
|
+
return None
|
|
408
|
+
row = rows[0]
|
|
409
|
+
if schema_type:
|
|
410
|
+
return to_schema(row, schema_type=schema_type)
|
|
411
|
+
return row
|
|
412
|
+
|
|
413
|
+
def get_count(self) -> int:
|
|
414
|
+
"""Get the number of rows in the current result set (e.g., a page of data).
|
|
415
|
+
|
|
416
|
+
Returns:
|
|
417
|
+
Number of rows in current result set.
|
|
418
|
+
"""
|
|
419
|
+
return len(self.data) if self.data is not None else 0
|
|
420
|
+
|
|
421
|
+
def is_empty(self) -> bool:
|
|
422
|
+
"""Check if the result set (self.data) is empty.
|
|
423
|
+
|
|
424
|
+
Returns:
|
|
425
|
+
True if result set is empty.
|
|
426
|
+
"""
|
|
427
|
+
return not self.data if self.data is not None else True
|
|
428
|
+
|
|
429
|
+
def get_affected_count(self) -> int:
|
|
430
|
+
"""Get the number of rows affected by a DML operation.
|
|
431
|
+
|
|
432
|
+
Returns:
|
|
433
|
+
Number of affected rows.
|
|
434
|
+
"""
|
|
435
|
+
return self.rows_affected or 0
|
|
436
|
+
|
|
437
|
+
def was_inserted(self) -> bool:
|
|
438
|
+
"""Check if this was an INSERT operation.
|
|
439
|
+
|
|
440
|
+
Returns:
|
|
441
|
+
True if INSERT operation.
|
|
442
|
+
"""
|
|
443
|
+
return self.operation_type.upper() == "INSERT"
|
|
444
|
+
|
|
445
|
+
def was_updated(self) -> bool:
|
|
446
|
+
"""Check if this was an UPDATE operation.
|
|
447
|
+
|
|
448
|
+
Returns:
|
|
449
|
+
True if UPDATE operation.
|
|
450
|
+
"""
|
|
451
|
+
return self.operation_type.upper() == "UPDATE"
|
|
452
|
+
|
|
453
|
+
def was_deleted(self) -> bool:
|
|
454
|
+
"""Check if this was a DELETE operation.
|
|
455
|
+
|
|
456
|
+
Returns:
|
|
457
|
+
True if DELETE operation.
|
|
458
|
+
"""
|
|
459
|
+
return self.operation_type.upper() == "DELETE"
|
|
460
|
+
|
|
461
|
+
def __len__(self) -> int:
|
|
462
|
+
"""Get the number of rows in the result set.
|
|
463
|
+
|
|
464
|
+
Returns:
|
|
465
|
+
Number of rows in the data.
|
|
466
|
+
"""
|
|
467
|
+
return len(self.data) if self.data is not None else 0
|
|
468
|
+
|
|
469
|
+
def __getitem__(self, index: int) -> "dict[str, Any]":
|
|
470
|
+
"""Get a row by index.
|
|
471
|
+
|
|
472
|
+
Args:
|
|
473
|
+
index: Row index
|
|
474
|
+
|
|
475
|
+
Returns:
|
|
476
|
+
The row at the specified index
|
|
477
|
+
"""
|
|
478
|
+
rows = self._get_rows()
|
|
479
|
+
if not rows:
|
|
480
|
+
msg = "No data available"
|
|
481
|
+
raise IndexError(msg)
|
|
482
|
+
return rows[index]
|
|
483
|
+
|
|
484
|
+
def __iter__(self) -> "Iterator[dict[str, Any]]":
|
|
485
|
+
"""Iterate over the rows in the result.
|
|
486
|
+
|
|
487
|
+
Returns:
|
|
488
|
+
Iterator that yields each row as a dictionary
|
|
489
|
+
"""
|
|
490
|
+
return iter(self._get_rows())
|
|
491
|
+
|
|
492
|
+
@overload
|
|
493
|
+
def all(self, *, schema_type: "type[SchemaT]") -> "list[SchemaT]": ...
|
|
494
|
+
|
|
495
|
+
@overload
|
|
496
|
+
def all(self, *, schema_type: None = None) -> "list[dict[str, Any]]": ...
|
|
497
|
+
|
|
498
|
+
def all(self, *, schema_type: "type[SchemaT] | None" = None) -> "list[SchemaT] | list[dict[str, Any]]":
|
|
499
|
+
"""Return all rows as a list.
|
|
500
|
+
|
|
501
|
+
Args:
|
|
502
|
+
schema_type: Optional schema type to transform the data into.
|
|
503
|
+
Supports Pydantic models, dataclasses, msgspec structs, attrs classes, and TypedDict.
|
|
504
|
+
|
|
505
|
+
Returns:
|
|
506
|
+
List of all rows (optionally transformed to schema_type)
|
|
507
|
+
"""
|
|
508
|
+
data = self._get_rows()
|
|
509
|
+
if schema_type:
|
|
510
|
+
return cast("list[SchemaT]", to_schema(data, schema_type=schema_type))
|
|
511
|
+
return data
|
|
512
|
+
|
|
513
|
+
@overload
|
|
514
|
+
def one(self, *, schema_type: "type[SchemaT]") -> "SchemaT": ...
|
|
515
|
+
|
|
516
|
+
@overload
|
|
517
|
+
def one(self, *, schema_type: None = None) -> "dict[str, Any]": ...
|
|
518
|
+
|
|
519
|
+
def one(self, *, schema_type: "type[SchemaT] | None" = None) -> "SchemaT | dict[str, Any]":
|
|
520
|
+
"""Return exactly one row.
|
|
521
|
+
|
|
522
|
+
Args:
|
|
523
|
+
schema_type: Optional schema type to transform the data into.
|
|
524
|
+
Supports Pydantic models, dataclasses, msgspec structs, attrs classes, and TypedDict.
|
|
525
|
+
|
|
526
|
+
Returns:
|
|
527
|
+
The single row (optionally transformed to schema_type)
|
|
528
|
+
|
|
529
|
+
Raises:
|
|
530
|
+
ValueError: If no results or more than one result
|
|
531
|
+
"""
|
|
532
|
+
rows = self._get_rows()
|
|
533
|
+
if not rows:
|
|
534
|
+
msg = "No result found, exactly one row expected"
|
|
535
|
+
raise ValueError(msg)
|
|
536
|
+
|
|
537
|
+
data_len = len(rows)
|
|
538
|
+
if data_len == 0:
|
|
539
|
+
msg = "No result found, exactly one row expected"
|
|
540
|
+
raise ValueError(msg)
|
|
541
|
+
if data_len > 1:
|
|
542
|
+
msg = f"Multiple results found ({data_len}), exactly one row expected"
|
|
543
|
+
raise ValueError(msg)
|
|
544
|
+
|
|
545
|
+
row = rows[0]
|
|
546
|
+
if schema_type:
|
|
547
|
+
return to_schema(row, schema_type=schema_type)
|
|
548
|
+
return row
|
|
549
|
+
|
|
550
|
+
@overload
|
|
551
|
+
def one_or_none(self, *, schema_type: "type[SchemaT]") -> "SchemaT | None": ...
|
|
552
|
+
|
|
553
|
+
@overload
|
|
554
|
+
def one_or_none(self, *, schema_type: None = None) -> "dict[str, Any] | None": ...
|
|
555
|
+
|
|
556
|
+
def one_or_none(self, *, schema_type: "type[SchemaT] | None" = None) -> "SchemaT | dict[str, Any] | None":
|
|
557
|
+
"""Return at most one row.
|
|
558
|
+
|
|
559
|
+
Args:
|
|
560
|
+
schema_type: Optional schema type to transform the data into.
|
|
561
|
+
Supports Pydantic models, dataclasses, msgspec structs, attrs classes, and TypedDict.
|
|
562
|
+
|
|
563
|
+
Returns:
|
|
564
|
+
The single row (optionally transformed to schema_type) or None if no results
|
|
565
|
+
|
|
566
|
+
Raises:
|
|
567
|
+
ValueError: If more than one result
|
|
568
|
+
"""
|
|
569
|
+
rows = self._get_rows()
|
|
570
|
+
if not rows:
|
|
571
|
+
return None
|
|
572
|
+
|
|
573
|
+
data_len = len(rows)
|
|
574
|
+
if data_len == 0:
|
|
575
|
+
return None
|
|
576
|
+
if data_len > 1:
|
|
577
|
+
msg = f"Multiple results found ({data_len}), at most one row expected"
|
|
578
|
+
raise ValueError(msg)
|
|
579
|
+
|
|
580
|
+
row = rows[0]
|
|
581
|
+
if schema_type:
|
|
582
|
+
return to_schema(row, schema_type=schema_type)
|
|
583
|
+
return row
|
|
584
|
+
|
|
585
|
+
def scalar(self) -> Any:
|
|
586
|
+
"""Return the first column of the first row.
|
|
587
|
+
|
|
588
|
+
Returns:
|
|
589
|
+
The scalar value from first column of first row
|
|
590
|
+
"""
|
|
591
|
+
row = self.one()
|
|
592
|
+
return next(iter(row.values()))
|
|
593
|
+
|
|
594
|
+
def scalar_or_none(self) -> Any:
|
|
595
|
+
"""Return the first column of the first row, or None if no results.
|
|
596
|
+
|
|
597
|
+
Returns:
|
|
598
|
+
The scalar value from first column of first row, or None
|
|
599
|
+
"""
|
|
600
|
+
row = self.one_or_none()
|
|
601
|
+
if row is None:
|
|
602
|
+
return None
|
|
603
|
+
|
|
604
|
+
return next(iter(row.values()))
|
|
605
|
+
|
|
606
|
+
def to_arrow(self) -> "ArrowTable":
|
|
607
|
+
"""Convert result data to Apache Arrow Table.
|
|
608
|
+
|
|
609
|
+
Returns:
|
|
610
|
+
Arrow Table containing the result data.
|
|
611
|
+
|
|
612
|
+
Raises:
|
|
613
|
+
ValueError: If no data available.
|
|
614
|
+
|
|
615
|
+
Examples:
|
|
616
|
+
>>> result = session.select("SELECT * FROM users")
|
|
617
|
+
>>> table = result.to_arrow()
|
|
618
|
+
>>> print(table.num_rows)
|
|
619
|
+
3
|
|
620
|
+
"""
|
|
621
|
+
if self.data is None:
|
|
622
|
+
msg = "No data available"
|
|
623
|
+
raise ValueError(msg)
|
|
624
|
+
|
|
625
|
+
return convert_dict_to_arrow(self.data, return_format="table")
|
|
626
|
+
|
|
627
|
+
def to_pandas(self) -> "PandasDataFrame":
|
|
628
|
+
"""Convert result data to pandas DataFrame.
|
|
629
|
+
|
|
630
|
+
Returns:
|
|
631
|
+
pandas DataFrame containing the result data.
|
|
632
|
+
|
|
633
|
+
Raises:
|
|
634
|
+
ValueError: If no data available.
|
|
635
|
+
|
|
636
|
+
Examples:
|
|
637
|
+
>>> result = session.select("SELECT * FROM users")
|
|
638
|
+
>>> df = result.to_pandas()
|
|
639
|
+
>>> print(df.head())
|
|
640
|
+
"""
|
|
641
|
+
if self.data is None:
|
|
642
|
+
msg = "No data available"
|
|
643
|
+
raise ValueError(msg)
|
|
644
|
+
|
|
645
|
+
return rows_to_pandas(self.data)
|
|
646
|
+
|
|
647
|
+
def to_polars(self) -> "PolarsDataFrame":
|
|
648
|
+
"""Convert result data to Polars DataFrame.
|
|
649
|
+
|
|
650
|
+
Returns:
|
|
651
|
+
Polars DataFrame containing the result data.
|
|
652
|
+
|
|
653
|
+
Raises:
|
|
654
|
+
ValueError: If no data available.
|
|
655
|
+
|
|
656
|
+
Examples:
|
|
657
|
+
>>> result = session.select("SELECT * FROM users")
|
|
658
|
+
>>> df = result.to_polars()
|
|
659
|
+
>>> print(df.head())
|
|
660
|
+
"""
|
|
661
|
+
if self.data is None:
|
|
662
|
+
msg = "No data available"
|
|
663
|
+
raise ValueError(msg)
|
|
664
|
+
|
|
665
|
+
return rows_to_polars(self.data)
|
|
666
|
+
|
|
667
|
+
def write_to_storage_sync(
|
|
668
|
+
self,
|
|
669
|
+
destination: "StorageDestination",
|
|
670
|
+
*,
|
|
671
|
+
format_hint: "StorageFormat | None" = None,
|
|
672
|
+
storage_options: "dict[str, Any] | None" = None,
|
|
673
|
+
pipeline: "SyncStoragePipeline | None" = None,
|
|
674
|
+
) -> "StorageTelemetry":
|
|
675
|
+
active_pipeline = pipeline or SyncStoragePipeline()
|
|
676
|
+
rows = self.get_data()
|
|
677
|
+
return active_pipeline.write_rows(rows, destination, format_hint=format_hint, storage_options=storage_options)
|
|
678
|
+
|
|
679
|
+
async def write_to_storage_async(
|
|
680
|
+
self,
|
|
681
|
+
destination: "StorageDestination",
|
|
682
|
+
*,
|
|
683
|
+
format_hint: "StorageFormat | None" = None,
|
|
684
|
+
storage_options: "dict[str, Any] | None" = None,
|
|
685
|
+
pipeline: "AsyncStoragePipeline | None" = None,
|
|
686
|
+
) -> "StorageTelemetry":
|
|
687
|
+
active_pipeline = pipeline or AsyncStoragePipeline()
|
|
688
|
+
rows = self.get_data()
|
|
689
|
+
return await active_pipeline.write_rows(
|
|
690
|
+
rows, destination, format_hint=format_hint, storage_options=storage_options
|
|
691
|
+
)
|
|
692
|
+
|
|
693
|
+
|
|
694
|
+


@mypyc_attr(allow_interpreted_subclasses=False)
class ArrowResult(StatementResult):
    """Result class for SQL operations that return Apache Arrow data.

    Used when database drivers support returning results in Apache Arrow
    format for data interchange. Suitable for analytics workloads and
    data science applications.

    Attributes:
        schema: Arrow schema information for the result data.
    """

    __slots__ = ("schema",)

    def __init__(
        self,
        statement: "SQL",
        data: Any,
        rows_affected: int = 0,
        last_inserted_id: int | str | None = None,
        execution_time: float | None = None,
        metadata: "dict[str, Any] | None" = None,
        schema: "dict[str, Any] | None" = None,
    ) -> None:
        """Initialize Arrow result.

        Args:
            statement: The original SQL statement that was executed.
            data: The Apache Arrow Table containing the result data.
            rows_affected: Number of rows affected by the operation.
            last_inserted_id: Last inserted ID (if applicable).
            execution_time: Time taken to execute the statement in seconds.
            metadata: Additional metadata about the operation.
            schema: Optional Arrow schema information.
        """
        super().__init__(
            statement=statement,
            data=data,
            rows_affected=rows_affected,
            last_inserted_id=last_inserted_id,
            execution_time=execution_time,
            metadata=metadata,
        )

        self.schema = schema

    def is_success(self) -> bool:
        """Check if the operation was successful.

        Returns:
            True if Arrow table data is available, False otherwise.
        """
        return self.data is not None

    def get_data(self) -> "ArrowTable":
        """Get the Apache Arrow Table from the result.

        Returns:
            The Arrow table containing the result data.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.
        """
        if self.data is None:
            msg = "No Arrow table available for this result"
            raise ValueError(msg)
        return ensure_arrow_table(self.data)

    @property
    def column_names(self) -> "list[str]":
        """Get the column names from the Arrow table.

        Returns:
            List of column names.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.
        """
        return arrow_table_column_names(self.get_data())

    @property
    def num_rows(self) -> int:
        """Get the number of rows in the Arrow table.

        Returns:
            Number of rows.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.
        """
        return arrow_table_num_rows(self.get_data())

    @property
    def num_columns(self) -> int:
        """Get the number of columns in the Arrow table.

        Returns:
            Number of columns.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.
        """
        return arrow_table_num_columns(self.get_data())

    def to_pandas(self) -> "PandasDataFrame":
        """Convert Arrow data to pandas DataFrame.

        Returns:
            pandas DataFrame containing the result data.

        Raises:
            ValueError: If no Arrow table is available.

        Examples:
            >>> result = session.select_to_arrow("SELECT * FROM users")
            >>> df = result.to_pandas()
            >>> print(df.head())
        """
        return arrow_table_to_pandas(self.get_data())

    def to_polars(self) -> "PolarsDataFrame":
        """Convert Arrow data to Polars DataFrame.

        Returns:
            Polars DataFrame containing the result data.

        Raises:
            ValueError: If no Arrow table is available.

        Examples:
            >>> result = session.select_to_arrow("SELECT * FROM users")
            >>> df = result.to_polars()
            >>> print(df.head())
        """
        return arrow_table_to_polars(self.get_data())

    def to_dict(self) -> "list[dict[str, Any]]":
        """Convert Arrow data to list of dictionaries.

        Returns:
            List of dictionaries, one per row.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.

        Examples:
            >>> result = session.select_to_arrow(
            ...     "SELECT id, name FROM users"
            ... )
            >>> rows = result.to_dict()
            >>> print(rows[0])
            {'id': 1, 'name': 'Alice'}
        """
        return arrow_table_to_pylist(self.get_data())

    def write_to_storage_sync(
        self,
        destination: "StorageDestination",
        *,
        format_hint: "StorageFormat | None" = None,
        storage_options: "dict[str, Any] | None" = None,
        compression: str | None = None,
        pipeline: "SyncStoragePipeline | None" = None,
    ) -> "StorageTelemetry":
        table = self.get_data()
        active_pipeline = pipeline or SyncStoragePipeline()
        return active_pipeline.write_arrow(
            table, destination, format_hint=format_hint, storage_options=storage_options, compression=compression
        )

    async def write_to_storage_async(
        self,
        destination: "StorageDestination",
        *,
        format_hint: "StorageFormat | None" = None,
        storage_options: "dict[str, Any] | None" = None,
        compression: str | None = None,
        pipeline: "AsyncStoragePipeline | None" = None,
    ) -> "StorageTelemetry":
        table = self.get_data()
        active_pipeline = pipeline or AsyncStoragePipeline()
        return await active_pipeline.write_arrow(
            table, destination, format_hint=format_hint, storage_options=storage_options, compression=compression
        )

    def __len__(self) -> int:
        """Return number of rows in the Arrow table.

        Returns:
            Number of rows.

        Raises:
            ValueError: If no Arrow table is available.
            TypeError: If data is not an Arrow Table.

        Examples:
            >>> result = session.select_to_arrow("SELECT * FROM users")
            >>> print(len(result))
            100
        """
        return arrow_table_num_rows(self.get_data())

    def __iter__(self) -> "Iterator[dict[str, Any]]":
        """Iterate over rows as dictionaries.

        Yields:
            Dictionary for each row.

        Raises:
            ValueError: If no Arrow table is available.

        Examples:
            >>> result = session.select_to_arrow(
            ...     "SELECT id, name FROM users"
            ... )
            >>> for row in result:
            ...     print(row["name"])
        """
        yield from arrow_table_to_pylist(self.get_data())
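

# NOTE: Editor's illustrative sketch; not part of the packaged sqlspec module.
# It exercises the ArrowResult accessors defined above: length, column metadata,
# dict conversion, and the pandas bridge. It only assumes it receives an
# ArrowResult that already wraps a pyarrow table (for example from a driver's
# select_to_arrow() call, as the docstrings above suggest).
def _example_inspect_arrow_result(result: "ArrowResult") -> "list[dict[str, Any]]":
    # Shape information backed by arrow_table_num_rows/_num_columns.
    print(f"{len(result)} rows x {result.num_columns} columns: {result.column_names}")
    # to_pandas()/to_polars() hand the Arrow table to the respective DataFrame library.
    frame = result.to_pandas()
    print(frame.head())
    # Row-wise consumption mirrors __iter__, which yields one dict per row.
    return result.to_dict()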


class EmptyResult(StatementResult):
    """Sentinel result used when a stack operation has no driver result."""

    __slots__ = ()

    def __init__(self) -> None:
        super().__init__(statement=_EMPTY_RESULT_STATEMENT, data=[], rows_affected=0)

    def __iter__(self) -> "Iterator[Any]":
        return iter(())

    def is_success(self) -> bool:
        return True

    def get_data(self) -> "list[Any]":
        return []
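

# NOTE: Editor's illustrative sketch; not part of the packaged sqlspec module.
# EmptyResult is the sentinel StackResult falls back to when an operation produced
# no driver result: it always reports success, carries no rows, and iterates to
# nothing, so downstream code can treat it like any other StatementResult.
def _example_empty_result_defaults() -> None:
    sentinel = EmptyResult()
    assert sentinel.is_success()
    assert sentinel.get_data() == []
    assert list(sentinel) == []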


class StackResult:
    """Wrapper for per-operation stack results that surfaces driver results directly."""

    __slots__ = ("error", "metadata", "result", "rows_affected", "warning")

    def __init__(
        self,
        result: "StatementResult | ArrowResult | None" = None,
        *,
        rows_affected: int | None = None,
        error: Exception | None = None,
        warning: Any | None = None,
        metadata: "dict[str, Any] | None" = None,
    ) -> None:
        self.result: StatementResult | ArrowResult = result if result is not None else EmptyResult()
        if rows_affected is not None:
            self.rows_affected = rows_affected
        else:
            try:
                result_rows = object.__getattribute__(self.result, "rows_affected")
            except AttributeError:
                self.rows_affected = 0
            else:
                self.rows_affected = int(result_rows)
        self.error = error
        self.warning = warning
        self.metadata = dict(metadata) if metadata else None

    def get_result(self) -> "StatementResult | ArrowResult":
        """Return the underlying driver result."""

        return self.result

    @property
    def result_type(self) -> str:
        """Describe the underlying result type (SQL operation, Arrow, or custom)."""

        if isinstance(self.result, ArrowResult):
            return "ARROW"
        if isinstance(self.result, SQLResult):
            return self.result.operation_type.upper()
        return type(self.result).__name__.upper()

    def is_sql_result(self) -> bool:
        """Return True when the underlying result is an SQLResult."""

        return isinstance(self.result, StatementResult) and not isinstance(self.result, ArrowResult)

    def is_arrow_result(self) -> bool:
        """Return True when the underlying result is an ArrowResult."""

        return isinstance(self.result, ArrowResult)

    def is_error(self) -> bool:
        """Return True when the stack operation captured an error."""

        return self.error is not None

    def with_error(self, error: Exception) -> "StackResult":
        """Return a copy of the result that records the provided error."""

        return StackResult(
            result=self.result,
            rows_affected=self.rows_affected,
            warning=self.warning,
            metadata=self.metadata,
            error=error,
        )

    @classmethod
    def from_sql_result(cls, result: "SQLResult") -> "StackResult":
        """Convert a standard SQLResult into a stack-friendly representation."""

        metadata = dict(result.metadata) if result.metadata else None
        warning = metadata.get("warning") if metadata else None
        return cls(result=result, rows_affected=result.rows_affected, warning=warning, metadata=metadata)

    @classmethod
    def from_arrow_result(cls, result: "ArrowResult") -> "StackResult":
        """Create a stack result from an ArrowResult instance."""

        metadata = dict(result.metadata) if result.metadata else None
        return cls(result=result, rows_affected=result.rows_affected, metadata=metadata)

    @classmethod
    def from_error(cls, error: Exception) -> "StackResult":
        """Create an error-only stack result."""

        return cls(result=EmptyResult(), rows_affected=0, error=error)
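

# NOTE: Editor's illustrative sketch; not part of the packaged sqlspec module.
# It shows the three StackResult entry points defined above: wrapping a successful
# SQLResult, recording a failure, and attaching an error to an existing result.
# The incoming "result" and "error" values are assumed to come from a prior stack
# execution; nothing is constructed beyond what the classmethods accept.
def _example_wrap_stack_results(result: "SQLResult", error: Exception) -> "list[StackResult]":
    ok = StackResult.from_sql_result(result)  # copies rows_affected and metadata
    failed = StackResult.from_error(error)  # wraps an EmptyResult and records the error
    annotated = ok.with_error(error)  # copy of "ok" that also records the error
    for entry in (ok, failed, annotated):
        print(entry.result_type, entry.rows_affected, entry.is_error())
    return [ok, failed, annotated]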


def create_sql_result(
    statement: "SQL",
    data: "list[dict[str, Any]] | None" = None,
    rows_affected: int = 0,
    last_inserted_id: int | str | None = None,
    execution_time: float | None = None,
    metadata: "dict[str, Any] | None" = None,
    **kwargs: Any,
) -> SQLResult:
    """Create SQLResult instance.

    Args:
        statement: The SQL statement that produced this result.
        data: Result data from query execution.
        rows_affected: Number of rows affected by the operation.
        last_inserted_id: Last inserted ID (for INSERT operations).
        execution_time: Execution time in seconds.
        metadata: Additional metadata about the result.
        **kwargs: Additional arguments for SQLResult initialization.

    Returns:
        SQLResult instance.
    """
    return SQLResult(
        statement=statement,
        data=data,
        rows_affected=rows_affected,
        last_inserted_id=last_inserted_id,
        execution_time=execution_time,
        metadata=metadata,
        **kwargs,
    )
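

# NOTE: Editor's illustrative sketch; not part of the packaged sqlspec module.
# It shows the create_sql_result() factory above being used to fabricate a result,
# which is mainly useful in tests or adapters that buffer rows themselves. The
# "statement" argument is assumed to be an already-built SQL object; the row data
# and timing values are placeholder assumptions.
def _example_fabricate_sql_result(statement: "SQL") -> "SQLResult":
    return create_sql_result(
        statement=statement,
        data=[{"id": 1, "name": "Alice"}],  # hypothetical buffered rows
        rows_affected=1,
        execution_time=0.002,
        metadata={"source": "example"},  # hypothetical metadata payload
    )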


def build_arrow_result_from_table(
    statement: "SQL",
    table: "ArrowTable",
    *,
    return_format: "ArrowReturnFormat" = "table",
    batch_size: int | None = None,
    arrow_schema: Any = None,
) -> ArrowResult:
    """Create ArrowResult from a pyarrow table with optional formatting.

    Args:
        statement: SQL statement that produced the table.
        table: Arrow table to wrap.
        return_format: Output format for the Arrow data.
        batch_size: Batch size hint for batch-based formats.
        arrow_schema: Optional pyarrow.Schema for casting.

    Returns:
        ArrowResult instance.
    """

    coerced_table = cast_arrow_table_schema(table, arrow_schema)
    arrow_data = arrow_table_to_return_format(coerced_table, return_format=return_format, batch_size=batch_size)
    rows_affected = arrow_table_num_rows(coerced_table)
    return create_arrow_result(statement=statement, data=arrow_data, rows_affected=rows_affected)


def create_arrow_result(
    statement: "SQL",
    data: Any,
    rows_affected: int = 0,
    last_inserted_id: int | str | None = None,
    execution_time: float | None = None,
    metadata: "dict[str, Any] | None" = None,
    schema: "dict[str, Any] | None" = None,
) -> ArrowResult:
    """Create ArrowResult instance.

    Args:
        statement: The SQL statement that produced this result.
        data: Arrow-based result data.
        rows_affected: Number of rows affected by the operation.
        last_inserted_id: Last inserted ID (for INSERT operations).
        execution_time: Execution time in seconds.
        metadata: Additional metadata about the result.
        schema: Optional Arrow schema information.

    Returns:
        ArrowResult instance.
    """
    return ArrowResult(
        statement=statement,
        data=data,
        rows_affected=rows_affected,
        last_inserted_id=last_inserted_id,
        execution_time=execution_time,
        metadata=metadata,
        schema=schema,
    )
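

# NOTE: Editor's illustrative sketch; not part of the packaged sqlspec module.
# It shows build_arrow_result_from_table() wrapping an in-memory pyarrow table in
# an ArrowResult, which is what the Arrow-capable drivers ultimately return. The
# "statement" argument is assumed to be an already-built SQL object; the sample
# table contents are placeholder assumptions.
def _example_wrap_pyarrow_table(statement: "SQL") -> "ArrowResult":
    import pyarrow as pa  # local import: pyarrow is an optional dependency

    table = pa.table({"id": [1, 2], "name": ["Alice", "Bob"]})
    result = build_arrow_result_from_table(statement, table, return_format="table")
    assert result.num_rows == 2 and result.column_names == ["id", "name"]
    return result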