sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1229 @@
|
|
|
1
|
+
"""Oracle Driver"""
|
|
2
|
+
|
|
3
|
+
import contextlib
|
|
4
|
+
import logging
|
|
5
|
+
from typing import TYPE_CHECKING, Any, NamedTuple, cast
|
|
6
|
+
|
|
7
|
+
import oracledb
|
|
8
|
+
from oracledb import AsyncCursor, Cursor
|
|
9
|
+
|
|
10
|
+
from sqlspec.adapters.oracledb._typing import (
|
|
11
|
+
OracleAsyncConnection,
|
|
12
|
+
OracleAsyncSessionContext,
|
|
13
|
+
OracleSyncConnection,
|
|
14
|
+
OracleSyncSessionContext,
|
|
15
|
+
)
|
|
16
|
+
from sqlspec.adapters.oracledb.core import (
|
|
17
|
+
ORACLEDB_VERSION,
|
|
18
|
+
build_insert_statement,
|
|
19
|
+
build_pipeline_stack_result,
|
|
20
|
+
build_truncate_statement,
|
|
21
|
+
coerce_large_string_parameters_async,
|
|
22
|
+
coerce_large_string_parameters_sync,
|
|
23
|
+
collect_async_rows,
|
|
24
|
+
collect_sync_rows,
|
|
25
|
+
create_mapped_exception,
|
|
26
|
+
default_statement_config,
|
|
27
|
+
driver_profile,
|
|
28
|
+
normalize_column_names,
|
|
29
|
+
normalize_execute_many_parameters_async,
|
|
30
|
+
normalize_execute_many_parameters_sync,
|
|
31
|
+
resolve_rowcount,
|
|
32
|
+
)
|
|
33
|
+
from sqlspec.adapters.oracledb.data_dictionary import OracledbAsyncDataDictionary, OracledbSyncDataDictionary
|
|
34
|
+
from sqlspec.core import (
|
|
35
|
+
SQL,
|
|
36
|
+
StackResult,
|
|
37
|
+
StatementConfig,
|
|
38
|
+
StatementStack,
|
|
39
|
+
build_arrow_result_from_table,
|
|
40
|
+
get_cache_config,
|
|
41
|
+
register_driver_profile,
|
|
42
|
+
)
|
|
43
|
+
from sqlspec.driver import (
|
|
44
|
+
AsyncDriverAdapterBase,
|
|
45
|
+
StackExecutionObserver,
|
|
46
|
+
SyncDriverAdapterBase,
|
|
47
|
+
describe_stack_statement,
|
|
48
|
+
hash_stack_operations,
|
|
49
|
+
)
|
|
50
|
+
from sqlspec.exceptions import ImproperConfigurationError, SQLSpecError, StackExecutionError
|
|
51
|
+
from sqlspec.utils.logging import get_logger, log_with_context
|
|
52
|
+
from sqlspec.utils.module_loader import ensure_pyarrow
|
|
53
|
+
from sqlspec.utils.type_guards import has_pipeline_capability
|
|
54
|
+
|
|
55
|
+
if TYPE_CHECKING:
|
|
56
|
+
from collections.abc import Sequence
|
|
57
|
+
|
|
58
|
+
from sqlspec.adapters.oracledb._typing import OraclePipelineDriver
|
|
59
|
+
from sqlspec.builder import QueryBuilder
|
|
60
|
+
from sqlspec.core import ArrowResult, Statement, StatementConfig, StatementFilter
|
|
61
|
+
from sqlspec.core.stack import StackOperation
|
|
62
|
+
from sqlspec.driver import ExecutionResult
|
|
63
|
+
from sqlspec.storage import StorageBridgeJob, StorageDestination, StorageFormat, StorageTelemetry
|
|
64
|
+
from sqlspec.typing import ArrowReturnFormat, StatementParameters, VersionInfo
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
# Module-level logger for this driver adapter.
logger = get_logger(__name__)

# Oracle-specific constants
LARGE_STRING_THRESHOLD = 4000  # Threshold for large string parameters to avoid ORA-01704

__all__ = (
    "OracleAsyncDriver",
    "OracleAsyncExceptionHandler",
    "OracleAsyncSessionContext",
    "OracleSyncDriver",
    "OracleSyncExceptionHandler",
    "OracleSyncSessionContext",
)

# Minimum python-oracledb driver version required before native pipeline
# execution is attempted.
PIPELINE_MIN_DRIVER_VERSION: "tuple[int, int, int]" = (2, 4, 0)
# Minimum Oracle Database major version required for pipeline execution.
PIPELINE_MIN_DATABASE_MAJOR: int = 23
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class _CompiledStackOperation(NamedTuple):
    """A stack operation compiled into the pieces needed for pipeline dispatch."""

    statement: SQL  # prepared SQL statement object the operation was compiled from
    sql: str  # compiled SQL text to submit to the database
    parameters: Any  # bound parameters; presumably one set or a sequence of sets — confirm at call site
    method: str  # originating stack method name (e.g. "execute", "execute_many")
    returns_rows: bool  # whether the operation is expected to yield a result set
    summary: str  # short human-readable description, used in diagnostics/errors
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
class OraclePipelineMixin:
    """Shared helpers for Oracle pipeline execution."""

    __slots__ = ()  # pure mixin: contributes behavior only, no instance state
|
|
98
|
+
|
|
99
|
+
def _stack_native_blocker(self, stack: "StatementStack") -> "str | None":
|
|
100
|
+
for operation in stack.operations:
|
|
101
|
+
if operation.method == "execute_arrow":
|
|
102
|
+
return "arrow_operation"
|
|
103
|
+
if operation.method == "execute_script":
|
|
104
|
+
return "script_operation"
|
|
105
|
+
return None
|
|
106
|
+
|
|
107
|
+
def _log_pipeline_skip(self, reason: str, stack: "StatementStack") -> None:
|
|
108
|
+
log_level = logging.INFO if reason == "env_override" else logging.DEBUG
|
|
109
|
+
log_with_context(
|
|
110
|
+
logger,
|
|
111
|
+
log_level,
|
|
112
|
+
"stack.native_pipeline.skip",
|
|
113
|
+
driver=type(self).__name__,
|
|
114
|
+
reason=reason,
|
|
115
|
+
hashed_operations=hash_stack_operations(stack),
|
|
116
|
+
)
|
|
117
|
+
|
|
118
|
+
def _prepare_pipeline_operation(self, operation: "StackOperation") -> _CompiledStackOperation:
|
|
119
|
+
driver = cast("OraclePipelineDriver", self)
|
|
120
|
+
kwargs = dict(operation.keyword_arguments) if operation.keyword_arguments else {}
|
|
121
|
+
statement_config = kwargs.pop("statement_config", None)
|
|
122
|
+
config = statement_config or driver.statement_config
|
|
123
|
+
|
|
124
|
+
if operation.method == "execute":
|
|
125
|
+
sql_statement = driver.prepare_statement(
|
|
126
|
+
operation.statement, operation.arguments, statement_config=config, kwargs=kwargs
|
|
127
|
+
)
|
|
128
|
+
elif operation.method == "execute_many":
|
|
129
|
+
if not operation.arguments:
|
|
130
|
+
msg = "execute_many stack operation requires parameter sets"
|
|
131
|
+
raise ValueError(msg)
|
|
132
|
+
parameter_sets = operation.arguments[0]
|
|
133
|
+
filters = operation.arguments[1:]
|
|
134
|
+
if isinstance(operation.statement, SQL):
|
|
135
|
+
statement_seed = operation.statement.raw_expression or operation.statement.raw_sql
|
|
136
|
+
sql_statement = SQL(statement_seed, parameter_sets, statement_config=config, is_many=True, **kwargs)
|
|
137
|
+
else:
|
|
138
|
+
base_statement = driver.prepare_statement(
|
|
139
|
+
operation.statement, filters, statement_config=config, kwargs=kwargs
|
|
140
|
+
)
|
|
141
|
+
statement_seed = base_statement.raw_expression or base_statement.raw_sql
|
|
142
|
+
sql_statement = SQL(statement_seed, parameter_sets, statement_config=config, is_many=True, **kwargs)
|
|
143
|
+
else:
|
|
144
|
+
msg = f"Unsupported stack operation method: {operation.method}"
|
|
145
|
+
raise ValueError(msg)
|
|
146
|
+
|
|
147
|
+
compiled_sql, prepared_parameters = driver._get_compiled_sql( # pyright: ignore[reportPrivateUsage]
|
|
148
|
+
sql_statement, config
|
|
149
|
+
)
|
|
150
|
+
summary = describe_stack_statement(operation.statement)
|
|
151
|
+
return _CompiledStackOperation(
|
|
152
|
+
statement=sql_statement,
|
|
153
|
+
sql=compiled_sql,
|
|
154
|
+
parameters=prepared_parameters,
|
|
155
|
+
method=operation.method,
|
|
156
|
+
returns_rows=sql_statement.returns_rows(),
|
|
157
|
+
summary=summary,
|
|
158
|
+
)
|
|
159
|
+
|
|
160
|
+
def _add_pipeline_operation(self, pipeline: Any, operation: _CompiledStackOperation) -> None:
|
|
161
|
+
parameters = operation.parameters or []
|
|
162
|
+
if operation.method == "execute":
|
|
163
|
+
if operation.returns_rows:
|
|
164
|
+
pipeline.add_fetchall(operation.sql, parameters)
|
|
165
|
+
else:
|
|
166
|
+
pipeline.add_execute(operation.sql, parameters)
|
|
167
|
+
return
|
|
168
|
+
|
|
169
|
+
if operation.method == "execute_many":
|
|
170
|
+
pipeline.add_executemany(operation.sql, parameters)
|
|
171
|
+
return
|
|
172
|
+
|
|
173
|
+
msg = f"Unsupported pipeline operation: {operation.method}"
|
|
174
|
+
raise ValueError(msg)
|
|
175
|
+
|
|
176
|
+
def _build_stack_results_from_pipeline(
|
|
177
|
+
self,
|
|
178
|
+
compiled_operations: "Sequence[_CompiledStackOperation]",
|
|
179
|
+
pipeline_results: "Sequence[Any]",
|
|
180
|
+
continue_on_error: bool,
|
|
181
|
+
observer: StackExecutionObserver,
|
|
182
|
+
) -> "list[StackResult]":
|
|
183
|
+
driver = cast("OraclePipelineDriver", self)
|
|
184
|
+
stack_results: list[StackResult] = []
|
|
185
|
+
for index, (compiled, result) in enumerate(zip(compiled_operations, pipeline_results, strict=False)):
|
|
186
|
+
try:
|
|
187
|
+
error = result.error
|
|
188
|
+
except AttributeError:
|
|
189
|
+
error = None
|
|
190
|
+
if error is not None:
|
|
191
|
+
stack_error = StackExecutionError(
|
|
192
|
+
index,
|
|
193
|
+
compiled.summary,
|
|
194
|
+
error,
|
|
195
|
+
adapter=type(self).__name__,
|
|
196
|
+
mode="continue-on-error" if continue_on_error else "fail-fast",
|
|
197
|
+
)
|
|
198
|
+
if continue_on_error:
|
|
199
|
+
observer.record_operation_error(stack_error)
|
|
200
|
+
stack_results.append(StackResult.from_error(stack_error))
|
|
201
|
+
continue
|
|
202
|
+
raise stack_error
|
|
203
|
+
|
|
204
|
+
stack_results.append(
|
|
205
|
+
build_pipeline_stack_result(
|
|
206
|
+
compiled.statement,
|
|
207
|
+
compiled.method,
|
|
208
|
+
compiled.returns_rows,
|
|
209
|
+
compiled.parameters,
|
|
210
|
+
result,
|
|
211
|
+
driver.driver_features,
|
|
212
|
+
)
|
|
213
|
+
)
|
|
214
|
+
return stack_results
|
|
215
|
+
|
|
216
|
+
def _wrap_pipeline_error(
|
|
217
|
+
self, error: Exception, stack: "StatementStack", continue_on_error: bool
|
|
218
|
+
) -> StackExecutionError:
|
|
219
|
+
mode = "continue-on-error" if continue_on_error else "fail-fast"
|
|
220
|
+
return StackExecutionError(
|
|
221
|
+
-1, "Oracle pipeline execution failed", error, adapter=type(self).__name__, mode=mode
|
|
222
|
+
)
|
|
223
|
+
|
|
224
|
+
|
|
225
|
+
class OracleSyncCursor:
    """Context manager that opens an Oracle cursor and closes it on exit."""

    __slots__ = ("connection", "cursor")

    def __init__(self, connection: OracleSyncConnection) -> None:
        self.connection = connection
        self.cursor: Cursor | None = None

    def __enter__(self) -> Cursor:
        opened = self.connection.cursor()
        self.cursor = opened
        return opened

    def __exit__(self, *_: object) -> None:
        active = self.cursor
        if active is None:
            return
        active.close()
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
class OracleAsyncCursor:
    """Async context manager that opens an Oracle cursor and closes it on exit."""

    __slots__ = ("connection", "cursor")

    def __init__(self, connection: OracleAsyncConnection) -> None:
        self.connection = connection
        self.cursor: AsyncCursor | None = None

    async def __aenter__(self) -> AsyncCursor:
        opened = self.connection.cursor()
        self.cursor = opened
        return opened

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        _ = (exc_type, exc_val, exc_tb)  # Protocol signature; values unused.
        active = self.cursor
        if active is None:
            return
        # Oracle async cursors expose a synchronous close(); suppress close-time
        # errors so cleanup never masks an in-flight exception.
        with contextlib.suppress(Exception):
            active.close()
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
class OracleSyncExceptionHandler:
    """Sync context manager that converts Oracle database exceptions.

    Maps Oracle ORA-XXXXX error codes to specific SQLSpec exceptions
    for better error handling in application code.

    Uses the deferred exception pattern for mypyc compatibility: mapped
    exceptions are stored in pending_exception rather than raised from
    __exit__, avoiding ABI boundary violations with compiled code.
    """

    __slots__ = ("pending_exception",)

    def __init__(self) -> None:
        self.pending_exception: Exception | None = None

    def __enter__(self) -> "OracleSyncExceptionHandler":
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool:
        _ = exc_tb  # Traceback is not needed for mapping.
        is_database_error = exc_type is not None and issubclass(exc_type, oracledb.DatabaseError)
        if is_database_error:
            # Defer: stash the mapped exception instead of raising across the ABI boundary.
            self.pending_exception = create_mapped_exception(exc_val)
        return is_database_error
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
class OracleAsyncExceptionHandler:
    """Async context manager that converts Oracle database exceptions.

    Maps Oracle ORA-XXXXX error codes to specific SQLSpec exceptions
    for better error handling in application code.

    Uses the deferred exception pattern for mypyc compatibility: mapped
    exceptions are stored in pending_exception rather than raised from
    __aexit__, avoiding ABI boundary violations with compiled code.
    """

    __slots__ = ("pending_exception",)

    def __init__(self) -> None:
        self.pending_exception: Exception | None = None

    async def __aenter__(self) -> "OracleAsyncExceptionHandler":
        return self

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool:
        _ = exc_tb  # Traceback is not needed for mapping.
        is_database_error = exc_type is not None and issubclass(exc_type, oracledb.DatabaseError)
        if is_database_error:
            # Defer: stash the mapped exception instead of raising across the ABI boundary.
            self.pending_exception = create_mapped_exception(exc_val)
        return is_database_error
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
class OracleSyncDriver(OraclePipelineMixin, SyncDriverAdapterBase):
    """Synchronous Oracle Database driver.

    Provides Oracle Database connectivity with parameter style conversion,
    error handling, and transaction management.
    """

    __slots__ = ("_data_dictionary", "_oracle_version", "_pipeline_support", "_pipeline_support_reason")
    dialect = "oracle"

    def __init__(
        self,
        connection: OracleSyncConnection,
        statement_config: "StatementConfig | None" = None,
        driver_features: "dict[str, Any] | None" = None,
    ) -> None:
        """Initialize the driver around an open synchronous connection.

        Args:
            connection: Open synchronous Oracle connection.
            statement_config: Optional statement configuration; when omitted,
                the module default is used with caching toggled by the global
                cache config.
            driver_features: Optional adapter feature flags.
        """
        if statement_config is None:
            statement_config = default_statement_config.replace(
                enable_caching=get_cache_config().compiled_cache_enabled
            )

        super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
        # Lazily-created metadata helper (see the data_dictionary property).
        self._data_dictionary: OracledbSyncDataDictionary | None = None
        # Cached result / skip reason of the native pipeline support probe.
        self._pipeline_support: bool | None = None
        self._pipeline_support_reason: str | None = None
        # Cached database version detected via the data dictionary.
        self._oracle_version: VersionInfo | None = None

    # ─────────────────────────────────────────────────────────────────────────────
    # CORE DISPATCH METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def dispatch_execute(self, cursor: "Cursor", statement: "SQL") -> "ExecutionResult":
        """Execute single SQL statement with Oracle data handling.

        Args:
            cursor: Oracle cursor object
            statement: SQL statement to execute

        Returns:
            Execution result containing data for SELECT statements or row count for others

        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)

        # Strings above the threshold are coerced to CLOB binds to avoid ORA-01704.
        prepared_parameters = coerce_large_string_parameters_sync(
            self.connection, prepared_parameters, lob_type=oracledb.DB_TYPE_CLOB, threshold=LARGE_STRING_THRESHOLD
        )
        prepared_parameters = cast("list[Any] | tuple[Any, ...] | dict[Any, Any] | None", prepared_parameters)

        cursor.execute(sql, prepared_parameters or {})

        # SELECT result processing for Oracle
        if statement.returns_rows():
            fetched_data = cursor.fetchall()
            data, column_names = collect_sync_rows(
                cast("list[Any] | None", fetched_data), cursor.description, self.driver_features
            )

            return self.create_execution_result(
                cursor, selected_data=data, column_names=column_names, data_row_count=len(data), is_select_result=True
            )

        # Non-SELECT result processing
        affected_rows = resolve_rowcount(cursor)
        return self.create_execution_result(cursor, rowcount_override=affected_rows)

    def dispatch_execute_many(self, cursor: "Cursor", statement: "SQL") -> "ExecutionResult":
        """Execute SQL with multiple parameter sets using Oracle batch processing.

        Args:
            cursor: Oracle cursor object
            statement: SQL statement with multiple parameter sets

        Returns:
            Execution result with affected row count

        Raises:
            ValueError: If no parameters are provided

        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)

        prepared_parameters = normalize_execute_many_parameters_sync(prepared_parameters)
        cursor.executemany(sql, prepared_parameters)

        # The reported row count is the number of parameter sets submitted;
        # cursor.rowcount is deliberately not consulted here.
        affected_rows = len(prepared_parameters)

        return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)

    def dispatch_execute_script(self, cursor: "Cursor", statement: "SQL") -> "ExecutionResult":
        """Execute SQL script with statement splitting and parameter handling.

        Parameters are embedded as static values for script execution compatibility.

        Args:
            cursor: Oracle cursor object
            statement: SQL script statement to execute

        Returns:
            Execution result containing statement count and success information

        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
        prepared_parameters = cast("list[Any] | tuple[Any, ...] | dict[Any, Any] | None", prepared_parameters)
        statements = self.split_script_statements(sql, statement.statement_config, strip_trailing_semicolon=True)

        successful_count = 0
        last_cursor = cursor

        # NOTE: the same parameter payload is passed to every split statement.
        for stmt in statements:
            cursor.execute(stmt, prepared_parameters or {})
            successful_count += 1

        return self.create_execution_result(
            last_cursor, statement_count=len(statements), successful_statements=successful_count, is_script_result=True
        )

    # ─────────────────────────────────────────────────────────────────────────────
    # TRANSACTION MANAGEMENT
    # ─────────────────────────────────────────────────────────────────────────────

    def begin(self) -> None:
        """Begin a database transaction.

        Oracle handles transactions automatically, so this is a no-op.
        """
        # Oracle handles transactions implicitly

    def commit(self) -> None:
        """Commit the current transaction.

        Raises:
            SQLSpecError: If commit fails

        """
        try:
            self.connection.commit()
        except oracledb.Error as e:
            msg = f"Failed to commit Oracle transaction: {e}"
            raise SQLSpecError(msg) from e

    def rollback(self) -> None:
        """Rollback the current transaction.

        Raises:
            SQLSpecError: If rollback fails

        """
        try:
            self.connection.rollback()
        except oracledb.Error as e:
            msg = f"Failed to rollback Oracle transaction: {e}"
            raise SQLSpecError(msg) from e

    def with_cursor(self, connection: OracleSyncConnection) -> OracleSyncCursor:
        """Create context manager for Oracle cursor.

        Args:
            connection: Oracle database connection

        Returns:
            Context manager for cursor operations

        """
        return OracleSyncCursor(connection)

    def handle_database_exceptions(self) -> "OracleSyncExceptionHandler":
        """Handle database-specific exceptions and wrap them appropriately."""
        return OracleSyncExceptionHandler()

    # ─────────────────────────────────────────────────────────────────────────────
    # ARROW API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def select_to_arrow(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        return_format: "ArrowReturnFormat" = "table",
        native_only: bool = False,
        batch_size: int | None = None,
        arrow_schema: Any = None,
        **kwargs: Any,
    ) -> "Any":
        """Execute query and return results as Apache Arrow format using Oracle native support.

        This implementation uses Oracle's native execute_df()/fetch_df_all() methods
        which return OracleDataFrame objects with Arrow PyCapsule interface, providing
        zero-copy data transfer and 5-10x performance improvement over dict conversion.
        If native Arrow is unavailable and native_only is False, it falls back to the
        conversion path.

        Args:
            statement: SQL query string, Statement, or QueryBuilder
            *parameters: Query parameters (same format as execute()/select())
            statement_config: Optional statement configuration override
            return_format: "table" for pyarrow.Table (default), "batch" for RecordBatch,
                "batches" for list of RecordBatch, "reader" for RecordBatchReader
            native_only: If True, raise error if native Arrow is unavailable
            batch_size: Rows per batch when using "batch" or "batches" format
            arrow_schema: Optional pyarrow.Schema for type casting
            **kwargs: Additional keyword arguments

        Returns:
            ArrowResult containing pyarrow.Table or RecordBatch

        Examples:
            >>> result = driver.select_to_arrow(
            ...     "SELECT * FROM users WHERE age > :1", (18,)
            ... )
            >>> df = result.to_pandas()
            >>> print(df.head())

        """
        ensure_pyarrow()

        import pyarrow as pa

        config = statement_config or self.statement_config
        prepared_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs)
        sql, prepared_parameters = self._get_compiled_sql(prepared_statement, config)

        try:
            oracle_df = self._execute_arrow_dataframe(sql, prepared_parameters, batch_size)
        except AttributeError as exc:
            # Older connections lack the DataFrame API entirely.
            if native_only:
                msg = "Oracle native Arrow support is not available for this connection."
                raise ImproperConfigurationError(msg) from exc
            return super().select_to_arrow(
                statement,
                *parameters,
                statement_config=statement_config,
                return_format=return_format,
                native_only=native_only,
                batch_size=batch_size,
                arrow_schema=arrow_schema,
                **kwargs,
            )

        arrow_table = pa.table(oracle_df)
        # Apply the adapter's column-name normalization (e.g. casing policy)
        # only when it actually changes something, to skip a needless copy.
        column_names = normalize_column_names(arrow_table.column_names, self.driver_features)
        if column_names != arrow_table.column_names:
            arrow_table = arrow_table.rename_columns(column_names)

        return build_arrow_result_from_table(
            prepared_statement,
            arrow_table,
            return_format=return_format,
            batch_size=batch_size,
            arrow_schema=arrow_schema,
        )

    # ─────────────────────────────────────────────────────────────────────────────
    # STACK EXECUTION METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def execute_stack(self, stack: "StatementStack", *, continue_on_error: bool = False) -> "tuple[StackResult, ...]":
        """Execute a StatementStack using Oracle's pipeline when available."""
        if not isinstance(stack, StatementStack) or not stack:
            return super().execute_stack(stack, continue_on_error=continue_on_error)

        # Fall back whenever the stack contains operations the pipeline cannot run.
        blocker = self._stack_native_blocker(stack)
        if blocker is not None:
            self._log_pipeline_skip(blocker, stack)
            return super().execute_stack(stack, continue_on_error=continue_on_error)

        if not self._pipeline_native_supported():
            self._log_pipeline_skip(self._pipeline_support_reason or "database_version", stack)
            return super().execute_stack(stack, continue_on_error=continue_on_error)

        return self._execute_stack_native(stack, continue_on_error=continue_on_error)

    # ─────────────────────────────────────────────────────────────────────────────
    # STORAGE API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def select_to_storage(
        self,
        statement: "Statement | QueryBuilder | SQL | str",
        destination: "StorageDestination",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        partitioner: "dict[str, object] | None" = None,
        format_hint: "StorageFormat | None" = None,
        telemetry: "StorageTelemetry | None" = None,
        **kwargs: Any,
    ) -> "StorageBridgeJob":
        """Execute a query and stream Arrow-formatted output to storage (sync)."""
        self._require_capability("arrow_export_enabled")
        arrow_result = self.select_to_arrow(statement, *parameters, statement_config=statement_config, **kwargs)
        sync_pipeline = self._storage_pipeline()
        telemetry_payload = self._write_result_to_storage_sync(
            arrow_result, destination, format_hint=format_hint, pipeline=sync_pipeline
        )
        self._attach_partition_telemetry(telemetry_payload, partitioner)
        return self._create_storage_job(telemetry_payload, telemetry)

    def load_from_arrow(
        self,
        table: str,
        source: "ArrowResult | Any",
        *,
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
        telemetry: "StorageTelemetry | None" = None,
    ) -> "StorageBridgeJob":
        """Load Arrow data into Oracle using batched executemany calls."""
        self._require_capability("arrow_import_enabled")
        arrow_table = self._coerce_arrow_table(source)
        # overwrite clears the target table before the insert.
        if overwrite:
            statement = build_truncate_statement(table)
            with self.handle_database_exceptions():
                self.connection.execute(statement)
        columns, records = self._arrow_table_to_rows(arrow_table)
        if records:
            statement = build_insert_statement(table, columns)
            with self.with_cursor(self.connection) as cursor, self.handle_database_exceptions():
                cursor.executemany(statement, records)
        telemetry_payload = self._build_ingest_telemetry(arrow_table)
        telemetry_payload["destination"] = table
        self._attach_partition_telemetry(telemetry_payload, partitioner)
        return self._create_storage_job(telemetry_payload, telemetry)

    def load_from_storage(
        self,
        table: str,
        source: "StorageDestination",
        *,
        file_format: "StorageFormat",
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
    ) -> "StorageBridgeJob":
        """Load staged artifacts into Oracle."""
        arrow_table, inbound = self._read_arrow_from_storage_sync(source, file_format=file_format)
        return self.load_from_arrow(table, arrow_table, partitioner=partitioner, overwrite=overwrite, telemetry=inbound)

    # ─────────────────────────────────────────────────────────────────────────────
    # UTILITY METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    @property
    def data_dictionary(self) -> "OracledbSyncDataDictionary":
        """Get the data dictionary for this driver.

        Returns:
            Data dictionary instance for metadata queries

        """
        if self._data_dictionary is None:
            self._data_dictionary = OracledbSyncDataDictionary()
        return self._data_dictionary

    # ─────────────────────────────────────────────────────────────────────────────
    # PRIVATE/INTERNAL METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    def _connection_in_transaction(self) -> bool:
        """Check if connection is in transaction."""
        # Always False here; native stack execution therefore opens its own
        # transaction whenever continue_on_error is not requested.
        return False

    def _detect_oracle_version(self) -> "VersionInfo | None":
        """Return the database version, caching the data-dictionary lookup."""
        if self._oracle_version is not None:
            return self._oracle_version
        version = self.data_dictionary.get_version(self)
        self._oracle_version = version
        return version

    def _detect_oracledb_version(self) -> "tuple[int, int, int]":
        """Return the installed python-oracledb version triple."""
        return ORACLEDB_VERSION

    def _execute_arrow_dataframe(self, sql: str, parameters: "Any", batch_size: int | None) -> "Any":
        """Execute SQL and return an Oracle DataFrame."""
        params = parameters if parameters is not None else []
        try:
            execute_df = self.connection.execute_df
        except AttributeError:
            execute_df = None
        if execute_df is not None:
            try:
                return execute_df(sql, params, arraysize=batch_size or 1000)
            except TypeError:
                # Older execute_df signatures do not accept arraysize.
                return execute_df(sql, params)
        # No execute_df on this connection: fall back to fetch_df_all.
        return self.connection.fetch_df_all(statement=sql, parameters=params, arraysize=batch_size or 1000)

    def _execute_stack_native(self, stack: "StatementStack", *, continue_on_error: bool) -> "tuple[StackResult, ...]":
        """Run a statement stack through oracledb's native pipeline API.

        In fail-fast mode (continue_on_error=False) the whole pipeline runs in
        a single transaction that is committed on success and rolled back on
        failure; any failure is re-raised wrapped as a StackExecutionError.
        """
        compiled_operations = [self._prepare_pipeline_operation(op) for op in stack.operations]
        pipeline = oracledb.create_pipeline()
        for compiled in compiled_operations:
            self._add_pipeline_operation(pipeline, compiled)

        results: list[StackResult] = []
        started_transaction = False

        with StackExecutionObserver(self, stack, continue_on_error, native_pipeline=True) as observer:
            try:
                if not continue_on_error and not self._connection_in_transaction():
                    self.begin()
                    started_transaction = True

                pipeline_results = self.connection.run_pipeline(pipeline, continue_on_error=continue_on_error)
                results = self._build_stack_results_from_pipeline(
                    compiled_operations, pipeline_results, continue_on_error, observer
                )

                if started_transaction:
                    self.commit()
            except Exception as exc:
                if started_transaction:
                    try:
                        self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after pipeline failure failed: %s", rollback_error)
                raise self._wrap_pipeline_error(exc, stack, continue_on_error) from exc

        return tuple(results)

    def _pipeline_native_supported(self) -> bool:
        """Probe (once) whether native pipeline execution can be used.

        The result and a machine-readable skip reason are cached on the
        instance. Requirements: no environment override, python-oracledb >=
        PIPELINE_MIN_DRIVER_VERSION, a connection exposing the pipeline API,
        and database major version >= PIPELINE_MIN_DATABASE_MAJOR.
        """
        if self._pipeline_support is not None:
            return self._pipeline_support

        if self.stack_native_disabled:
            self._pipeline_support = False
            self._pipeline_support_reason = "env_override"
            return False

        if self._detect_oracledb_version() < PIPELINE_MIN_DRIVER_VERSION:
            self._pipeline_support = False
            self._pipeline_support_reason = "driver_version"
            return False

        if not has_pipeline_capability(self.connection):
            self._pipeline_support = False
            self._pipeline_support_reason = "driver_api_missing"
            return False

        version_info = self._detect_oracle_version()
        if version_info and version_info.major >= PIPELINE_MIN_DATABASE_MAJOR:
            self._pipeline_support = True
            self._pipeline_support_reason = None
            return True

        self._pipeline_support = False
        self._pipeline_support_reason = "database_version"
        return False
|
|
770
|
+
|
|
771
|
+
|
|
772
|
+
class OracleAsyncDriver(OraclePipelineMixin, AsyncDriverAdapterBase):
|
|
773
|
+
"""Asynchronous Oracle Database driver.
|
|
774
|
+
|
|
775
|
+
Provides Oracle Database connectivity with parameter style conversion,
|
|
776
|
+
error handling, and transaction management for async operations.
|
|
777
|
+
"""
|
|
778
|
+
|
|
779
|
+
__slots__ = ("_data_dictionary", "_oracle_version", "_pipeline_support", "_pipeline_support_reason")
|
|
780
|
+
dialect = "oracle"
|
|
781
|
+
|
|
782
|
+
def __init__(
|
|
783
|
+
self,
|
|
784
|
+
connection: OracleAsyncConnection,
|
|
785
|
+
statement_config: "StatementConfig | None" = None,
|
|
786
|
+
driver_features: "dict[str, Any] | None" = None,
|
|
787
|
+
) -> None:
|
|
788
|
+
if statement_config is None:
|
|
789
|
+
statement_config = default_statement_config.replace(
|
|
790
|
+
enable_caching=get_cache_config().compiled_cache_enabled
|
|
791
|
+
)
|
|
792
|
+
|
|
793
|
+
super().__init__(connection=connection, statement_config=statement_config, driver_features=driver_features)
|
|
794
|
+
self._data_dictionary: OracledbAsyncDataDictionary | None = None
|
|
795
|
+
self._pipeline_support: bool | None = None
|
|
796
|
+
self._pipeline_support_reason: str | None = None
|
|
797
|
+
self._oracle_version: VersionInfo | None = None
|
|
798
|
+
|
|
799
|
+
    # ─────────────────────────────────────────────────────────────────────────────
    # CORE DISPATCH METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def dispatch_execute(self, cursor: "AsyncCursor", statement: "SQL") -> "ExecutionResult":
        """Execute single SQL statement with Oracle data handling.

        Args:
            cursor: Oracle cursor object
            statement: SQL statement to execute

        Returns:
            Execution result containing data for SELECT statements or row count for others

        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)

        # Coerce oversized string parameters into CLOB binds (threshold-based).
        prepared_parameters = await coerce_large_string_parameters_async(
            self.connection, prepared_parameters, lob_type=oracledb.DB_TYPE_CLOB, threshold=LARGE_STRING_THRESHOLD
        )
        prepared_parameters = cast("list[Any] | tuple[Any, ...] | dict[Any, Any] | None", prepared_parameters)

        # Fall back to an empty bind mapping when no parameters were produced.
        await cursor.execute(sql, prepared_parameters or {})

        # SELECT result processing for Oracle
        is_select_like = statement.returns_rows() or self._should_force_select(statement, cursor)

        if is_select_like:
            fetched_data = await cursor.fetchall()
            data, column_names = await collect_async_rows(
                cast("list[Any] | None", fetched_data), cursor.description, self.driver_features
            )

            return self.create_execution_result(
                cursor, selected_data=data, column_names=column_names, data_row_count=len(data), is_select_result=True
            )

        # Non-SELECT result processing
        affected_rows = resolve_rowcount(cursor)
        return self.create_execution_result(cursor, rowcount_override=affected_rows)
    async def dispatch_execute_many(self, cursor: "AsyncCursor", statement: "SQL") -> "ExecutionResult":
        """Execute SQL with multiple parameter sets using Oracle batch processing.

        Args:
            cursor: Oracle cursor object
            statement: SQL statement with multiple parameter sets

        Returns:
            Execution result with affected row count

        Raises:
            ValueError: If no parameters are provided

        """
        sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)

        prepared_parameters = normalize_execute_many_parameters_async(prepared_parameters)
        await cursor.executemany(sql, prepared_parameters)

        # NOTE(review): the batch size is reported as the affected count —
        # cursor.rowcount is intentionally not consulted here.
        affected_rows = len(prepared_parameters)

        return self.create_execution_result(cursor, rowcount_override=affected_rows, is_many_result=True)
async def dispatch_execute_script(self, cursor: "AsyncCursor", statement: "SQL") -> "ExecutionResult":
|
|
864
|
+
"""Execute SQL script with statement splitting and parameter handling.
|
|
865
|
+
|
|
866
|
+
Parameters are embedded as static values for script execution compatibility.
|
|
867
|
+
|
|
868
|
+
Args:
|
|
869
|
+
cursor: Oracle cursor object
|
|
870
|
+
statement: SQL script statement to execute
|
|
871
|
+
|
|
872
|
+
Returns:
|
|
873
|
+
Execution result containing statement count and success information
|
|
874
|
+
|
|
875
|
+
"""
|
|
876
|
+
sql, prepared_parameters = self._get_compiled_sql(statement, self.statement_config)
|
|
877
|
+
statements = self.split_script_statements(sql, statement.statement_config, strip_trailing_semicolon=True)
|
|
878
|
+
script_params = cast("dict[str, Any]", prepared_parameters or {})
|
|
879
|
+
|
|
880
|
+
successful_count = 0
|
|
881
|
+
last_cursor = cursor
|
|
882
|
+
|
|
883
|
+
for stmt in statements:
|
|
884
|
+
await cursor.execute(stmt, script_params)
|
|
885
|
+
successful_count += 1
|
|
886
|
+
|
|
887
|
+
return self.create_execution_result(
|
|
888
|
+
last_cursor, statement_count=len(statements), successful_statements=successful_count, is_script_result=True
|
|
889
|
+
)
|
|
890
|
+
|
|
891
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
892
|
+
# TRANSACTION MANAGEMENT
|
|
893
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
894
|
+
|
|
895
|
+
async def begin(self) -> None:
|
|
896
|
+
"""Begin a database transaction.
|
|
897
|
+
|
|
898
|
+
Oracle handles transactions automatically, so this is a no-op.
|
|
899
|
+
"""
|
|
900
|
+
# Oracle handles transactions implicitly
|
|
901
|
+
|
|
902
|
+
async def commit(self) -> None:
|
|
903
|
+
"""Commit the current transaction.
|
|
904
|
+
|
|
905
|
+
Raises:
|
|
906
|
+
SQLSpecError: If commit fails
|
|
907
|
+
|
|
908
|
+
"""
|
|
909
|
+
try:
|
|
910
|
+
await self.connection.commit()
|
|
911
|
+
except oracledb.Error as e:
|
|
912
|
+
msg = f"Failed to commit Oracle transaction: {e}"
|
|
913
|
+
raise SQLSpecError(msg) from e
|
|
914
|
+
|
|
915
|
+
async def rollback(self) -> None:
|
|
916
|
+
"""Rollback the current transaction.
|
|
917
|
+
|
|
918
|
+
Raises:
|
|
919
|
+
SQLSpecError: If rollback fails
|
|
920
|
+
|
|
921
|
+
"""
|
|
922
|
+
try:
|
|
923
|
+
await self.connection.rollback()
|
|
924
|
+
except oracledb.Error as e:
|
|
925
|
+
msg = f"Failed to rollback Oracle transaction: {e}"
|
|
926
|
+
raise SQLSpecError(msg) from e
|
|
927
|
+
|
|
928
|
+
def with_cursor(self, connection: OracleAsyncConnection) -> OracleAsyncCursor:
|
|
929
|
+
"""Create context manager for Oracle cursor.
|
|
930
|
+
|
|
931
|
+
Args:
|
|
932
|
+
connection: Oracle database connection
|
|
933
|
+
|
|
934
|
+
Returns:
|
|
935
|
+
Context manager for cursor operations
|
|
936
|
+
|
|
937
|
+
"""
|
|
938
|
+
return OracleAsyncCursor(connection)
|
|
939
|
+
|
|
940
|
+
def handle_database_exceptions(self) -> "OracleAsyncExceptionHandler":
|
|
941
|
+
"""Handle database-specific exceptions and wrap them appropriately."""
|
|
942
|
+
return OracleAsyncExceptionHandler()
|
|
943
|
+
|
|
944
|
+
    # ─────────────────────────────────────────────────────────────────────────────
    # ARROW API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def select_to_arrow(
        self,
        statement: "Statement | QueryBuilder",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        return_format: "ArrowReturnFormat" = "table",
        native_only: bool = False,
        batch_size: int | None = None,
        arrow_schema: Any = None,
        **kwargs: Any,
    ) -> "Any":
        """Execute query and return results as Apache Arrow format using Oracle native support.

        This implementation uses Oracle's native execute_df()/fetch_df_all() methods
        which return OracleDataFrame objects with Arrow PyCapsule interface, providing
        zero-copy data transfer and 5-10x performance improvement over dict conversion.
        If native Arrow is unavailable and native_only is False, it falls back to the
        conversion path.

        Args:
            statement: SQL query string, Statement, or QueryBuilder
            *parameters: Query parameters (same format as execute()/select())
            statement_config: Optional statement configuration override
            return_format: "table" for pyarrow.Table (default), "batch" for RecordBatch,
                "batches" for list of RecordBatch, "reader" for RecordBatchReader
            native_only: If True, raise error if native Arrow is unavailable
            batch_size: Rows per batch when using "batch" or "batches" format
            arrow_schema: Optional pyarrow.Schema for type casting
            **kwargs: Additional keyword arguments

        Returns:
            ArrowResult containing pyarrow.Table or RecordBatch

        Examples:
            >>> result = await driver.select_to_arrow(
            ...     "SELECT * FROM users WHERE age > :1", (18,)
            ... )
            >>> df = result.to_pandas()
            >>> print(df.head())

        """
        ensure_pyarrow()

        # Imported lazily so pyarrow remains an optional dependency.
        import pyarrow as pa

        config = statement_config or self.statement_config
        prepared_statement = self.prepare_statement(statement, parameters, statement_config=config, kwargs=kwargs)
        sql, prepared_parameters = self._get_compiled_sql(prepared_statement, config)

        try:
            oracle_df = await self._execute_arrow_dataframe(sql, prepared_parameters, batch_size)
        except AttributeError as exc:
            # AttributeError signals the connection lacks the native DataFrame APIs.
            if native_only:
                msg = "Oracle native Arrow support is not available for this connection."
                raise ImproperConfigurationError(msg) from exc
            # Fall back to the generic (dict-conversion) base implementation.
            return await super().select_to_arrow(
                statement,
                *parameters,
                statement_config=statement_config,
                return_format=return_format,
                native_only=native_only,
                batch_size=batch_size,
                arrow_schema=arrow_schema,
                **kwargs,
            )

        # pa.table() consumes the Oracle DataFrame via the Arrow PyCapsule interface.
        arrow_table = pa.table(oracle_df)
        column_names = normalize_column_names(arrow_table.column_names, self.driver_features)
        if column_names != arrow_table.column_names:
            arrow_table = arrow_table.rename_columns(column_names)

        return build_arrow_result_from_table(
            prepared_statement,
            arrow_table,
            return_format=return_format,
            batch_size=batch_size,
            arrow_schema=arrow_schema,
        )
    # ─────────────────────────────────────────────────────────────────────────────
    # STACK EXECUTION METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def execute_stack(
        self, stack: "StatementStack", *, continue_on_error: bool = False
    ) -> "tuple[StackResult, ...]":
        """Execute a StatementStack using Oracle's pipeline when available.

        Falls back to the sequential base implementation when the stack is
        empty or not a StatementStack, when an operation blocks native
        execution, or when pipeline support is unavailable; each skip is
        logged with its reason.
        """
        if not isinstance(stack, StatementStack) or not stack:
            return await super().execute_stack(stack, continue_on_error=continue_on_error)

        # Per-operation blocker check (semantics defined by OraclePipelineMixin).
        blocker = self._stack_native_blocker(stack)
        if blocker is not None:
            self._log_pipeline_skip(blocker, stack)
            return await super().execute_stack(stack, continue_on_error=continue_on_error)

        # Driver/connection/database capability check (cached after first call).
        if not await self._pipeline_native_supported():
            self._log_pipeline_skip(self._pipeline_support_reason or "database_version", stack)
            return await super().execute_stack(stack, continue_on_error=continue_on_error)

        return await self._execute_stack_native(stack, continue_on_error=continue_on_error)
    # ─────────────────────────────────────────────────────────────────────────────
    # STORAGE API METHODS
    # ─────────────────────────────────────────────────────────────────────────────

    async def select_to_storage(
        self,
        statement: "Statement | QueryBuilder | SQL | str",
        destination: "StorageDestination",
        /,
        *parameters: "StatementParameters | StatementFilter",
        statement_config: "StatementConfig | None" = None,
        partitioner: "dict[str, object] | None" = None,
        format_hint: "StorageFormat | None" = None,
        telemetry: "StorageTelemetry | None" = None,
        **kwargs: Any,
    ) -> "StorageBridgeJob":
        """Execute a query and write Arrow-compatible output to storage (async).

        Args:
            statement: Query to execute.
            destination: Storage destination for the exported artifact.
            *parameters: Query parameters/filters (same format as execute()).
            statement_config: Optional statement configuration override.
            partitioner: Optional partitioning metadata attached to telemetry.
            format_hint: Optional output format for the storage writer.
            telemetry: Optional caller-supplied telemetry merged into the job.
            **kwargs: Additional keyword arguments forwarded to select_to_arrow().

        Returns:
            StorageBridgeJob describing the completed write.
        """
        # Export requires the arrow-export capability flag on this driver.
        self._require_capability("arrow_export_enabled")
        arrow_result = await self.select_to_arrow(statement, *parameters, statement_config=statement_config, **kwargs)
        async_pipeline = self._storage_pipeline()
        telemetry_payload = await self._write_result_to_storage_async(
            arrow_result, destination, format_hint=format_hint, pipeline=async_pipeline
        )
        self._attach_partition_telemetry(telemetry_payload, partitioner)
        return self._create_storage_job(telemetry_payload, telemetry)
    async def load_from_arrow(
        self,
        table: str,
        source: "ArrowResult | Any",
        *,
        partitioner: "dict[str, object] | None" = None,
        overwrite: bool = False,
        telemetry: "StorageTelemetry | None" = None,
    ) -> "StorageBridgeJob":
        """Asynchronously load Arrow data into Oracle.

        Args:
            table: Target table name.
            source: ArrowResult or Arrow-compatible object to ingest.
            partitioner: Optional partitioning metadata attached to telemetry.
            overwrite: When True, truncate the target table before loading.
            telemetry: Optional caller-supplied telemetry merged into the job.

        Returns:
            StorageBridgeJob describing the completed ingest.
        """
        # Ingest requires the arrow-import capability flag on this driver.
        self._require_capability("arrow_import_enabled")
        arrow_table = self._coerce_arrow_table(source)
        if overwrite:
            # Truncate first so the table ends up containing only the new rows.
            statement = build_truncate_statement(table)
            async with self.handle_database_exceptions():
                await self.connection.execute(statement)
        columns, records = self._arrow_table_to_rows(arrow_table)
        if records:
            # Batch-insert all rows in a single executemany round trip.
            statement = build_insert_statement(table, columns)
            async with self.with_cursor(self.connection) as cursor, self.handle_database_exceptions():
                await cursor.executemany(statement, records)
        telemetry_payload = self._build_ingest_telemetry(arrow_table)
        telemetry_payload["destination"] = table
        self._attach_partition_telemetry(telemetry_payload, partitioner)
        return self._create_storage_job(telemetry_payload, telemetry)
async def load_from_storage(
|
|
1103
|
+
self,
|
|
1104
|
+
table: str,
|
|
1105
|
+
source: "StorageDestination",
|
|
1106
|
+
*,
|
|
1107
|
+
file_format: "StorageFormat",
|
|
1108
|
+
partitioner: "dict[str, object] | None" = None,
|
|
1109
|
+
overwrite: bool = False,
|
|
1110
|
+
) -> "StorageBridgeJob":
|
|
1111
|
+
"""Asynchronously load staged artifacts into Oracle."""
|
|
1112
|
+
arrow_table, inbound = await self._read_arrow_from_storage_async(source, file_format=file_format)
|
|
1113
|
+
return await self.load_from_arrow(
|
|
1114
|
+
table, arrow_table, partitioner=partitioner, overwrite=overwrite, telemetry=inbound
|
|
1115
|
+
)
|
|
1116
|
+
|
|
1117
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
1118
|
+
# UTILITY METHODS
|
|
1119
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
1120
|
+
|
|
1121
|
+
@property
|
|
1122
|
+
def data_dictionary(self) -> "OracledbAsyncDataDictionary":
|
|
1123
|
+
"""Get the data dictionary for this driver.
|
|
1124
|
+
|
|
1125
|
+
Returns:
|
|
1126
|
+
Data dictionary instance for metadata queries
|
|
1127
|
+
|
|
1128
|
+
"""
|
|
1129
|
+
if self._data_dictionary is None:
|
|
1130
|
+
self._data_dictionary = OracledbAsyncDataDictionary()
|
|
1131
|
+
return self._data_dictionary
|
|
1132
|
+
|
|
1133
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
1134
|
+
# PRIVATE/INTERNAL METHODS
|
|
1135
|
+
# ─────────────────────────────────────────────────────────────────────────────
|
|
1136
|
+
|
|
1137
|
+
def _connection_in_transaction(self) -> bool:
|
|
1138
|
+
"""Check if connection is in transaction."""
|
|
1139
|
+
return False
|
|
1140
|
+
|
|
1141
|
+
async def _detect_oracle_version(self) -> "VersionInfo | None":
|
|
1142
|
+
if self._oracle_version is not None:
|
|
1143
|
+
return self._oracle_version
|
|
1144
|
+
version = await self.data_dictionary.get_version(self)
|
|
1145
|
+
self._oracle_version = version
|
|
1146
|
+
return version
|
|
1147
|
+
|
|
1148
|
+
def _detect_oracledb_version(self) -> "tuple[int, int, int]":
|
|
1149
|
+
return ORACLEDB_VERSION
|
|
1150
|
+
|
|
1151
|
+
    async def _execute_arrow_dataframe(self, sql: str, parameters: "Any", batch_size: int | None) -> "Any":
        """Execute SQL and return an Oracle DataFrame.

        Prefers the connection's ``execute_df`` API when present; otherwise
        falls back to ``fetch_df_all``. If neither attribute exists the
        resulting AttributeError propagates to the caller (select_to_arrow
        catches it to select the non-native path).
        """
        params = parameters if parameters is not None else []
        try:
            execute_df = self.connection.execute_df
        except AttributeError:
            # Connection without execute_df; use fetch_df_all below.
            execute_df = None
        if execute_df is not None:
            try:
                return await execute_df(sql, params, arraysize=batch_size or 1000)
            except TypeError:
                # Presumably an execute_df signature without `arraysize` — retry without it.
                return await execute_df(sql, params)
        return await self.connection.fetch_df_all(statement=sql, parameters=params, arraysize=batch_size or 1000)
    async def _execute_stack_native(
        self, stack: "StatementStack", *, continue_on_error: bool
    ) -> "tuple[StackResult, ...]":
        """Run a StatementStack through oracledb's native pipeline.

        Compiles every operation up front, adds them all to one pipeline, and
        executes it in a single run_pipeline() call. When ``continue_on_error``
        is False and no transaction is already open, the pipeline runs inside
        a driver-opened transaction: committed on success, rolled back on
        failure (rollback errors are logged, not raised).
        """
        compiled_operations = [self._prepare_pipeline_operation(op) for op in stack.operations]
        pipeline = oracledb.create_pipeline()
        for compiled in compiled_operations:
            self._add_pipeline_operation(pipeline, compiled)

        results: list[StackResult] = []
        started_transaction = False

        with StackExecutionObserver(self, stack, continue_on_error, native_pipeline=True) as observer:
            try:
                # Only open our own transaction for all-or-nothing stacks.
                if not continue_on_error and not self._connection_in_transaction():
                    await self.begin()
                    started_transaction = True

                pipeline_results = await self.connection.run_pipeline(pipeline, continue_on_error=continue_on_error)
                results = self._build_stack_results_from_pipeline(
                    compiled_operations, pipeline_results, continue_on_error, observer
                )

                if started_transaction:
                    await self.commit()
            except Exception as exc:
                # Best-effort rollback; the original failure is what gets raised.
                if started_transaction:
                    try:
                        await self.rollback()
                    except Exception as rollback_error:  # pragma: no cover - diagnostics only
                        logger.debug("Rollback after pipeline failure failed: %s", rollback_error)
                raise self._wrap_pipeline_error(exc, stack, continue_on_error) from exc

        return tuple(results)
    async def _pipeline_native_supported(self) -> bool:
        """Determine (and cache) whether native pipelining can be used.

        The verdict is cached on the instance along with a reason string when
        unsupported. Checks in order: environment/stack override, installed
        python-oracledb version, the connection's pipeline API, and finally
        the database server's major version.
        """
        if self._pipeline_support is not None:
            return self._pipeline_support

        if self.stack_native_disabled:
            self._pipeline_support = False
            self._pipeline_support_reason = "env_override"
            return False

        if self._detect_oracledb_version() < PIPELINE_MIN_DRIVER_VERSION:
            self._pipeline_support = False
            self._pipeline_support_reason = "driver_version"
            return False

        if not has_pipeline_capability(self.connection):
            self._pipeline_support = False
            self._pipeline_support_reason = "driver_api_missing"
            return False

        # Database check last: it may require a round trip (version lookup is cached).
        version_info = await self._detect_oracle_version()
        if version_info and version_info.major >= PIPELINE_MIN_DATABASE_MAJOR:
            self._pipeline_support = True
            self._pipeline_support_reason = None
            return True

        self._pipeline_support = False
        self._pipeline_support_reason = "database_version"
        return False
# Register this adapter's profile with the driver registry at import time.
register_driver_profile("oracledb", driver_profile)
|