sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,2369 @@
|
|
|
1
|
+
"""Oracle ADK store for Google Agent Development Kit session/event storage."""
|
|
2
|
+
|
|
3
|
+
from decimal import Decimal
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from typing import TYPE_CHECKING, Any, Final, cast
|
|
6
|
+
|
|
7
|
+
import oracledb
|
|
8
|
+
|
|
9
|
+
from sqlspec import SQL
|
|
10
|
+
from sqlspec.adapters.oracledb.data_dictionary import (
|
|
11
|
+
OracledbAsyncDataDictionary,
|
|
12
|
+
OracledbSyncDataDictionary,
|
|
13
|
+
OracleVersionInfo,
|
|
14
|
+
)
|
|
15
|
+
from sqlspec.extensions.adk import BaseAsyncADKStore, BaseSyncADKStore, EventRecord, SessionRecord
|
|
16
|
+
from sqlspec.extensions.adk.memory.store import BaseAsyncADKMemoryStore, BaseSyncADKMemoryStore
|
|
17
|
+
from sqlspec.utils.logging import get_logger
|
|
18
|
+
from sqlspec.utils.serializers import from_json, to_json
|
|
19
|
+
from sqlspec.utils.type_guards import is_async_readable, is_readable
|
|
20
|
+
|
|
21
|
+
if TYPE_CHECKING:
|
|
22
|
+
from datetime import datetime
|
|
23
|
+
|
|
24
|
+
from sqlspec.adapters.oracledb.config import OracleAsyncConfig, OracleSyncConfig
|
|
25
|
+
from sqlspec.extensions.adk import MemoryRecord
|
|
26
|
+
|
|
27
|
+
# Module-level logger used by the version-detection helpers below.
logger = get_logger("sqlspec.adapters.oracledb.adk.store")

__all__ = (
    "JSONStorageType",
    "OracleAsyncADKMemoryStore",
    "OracleAsyncADKStore",
    "OracleSyncADKMemoryStore",
    "OracleSyncADKStore",
    "coerce_decimal_values",
    "storage_type_from_version",
)

# ORA-00942 ("table or view does not exist") — used to detect missing ADK tables.
ORACLE_TABLE_NOT_FOUND_ERROR: Final = 942
# Minimum Oracle major version for the native JSON column type (21c).
ORACLE_MIN_JSON_NATIVE_VERSION: Final = 21
# Minimum COMPATIBLE initialization setting required for native JSON
# (see the "compatible >= 20" check logged by _storage_type_from_version).
ORACLE_MIN_JSON_NATIVE_COMPATIBLE: Final = 20
# Minimum major version supporting BLOB-backed JSON storage (12c).
ORACLE_MIN_JSON_BLOB_VERSION: Final = 12
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class JSONStorageType(str, Enum):
    """JSON storage type based on Oracle version.

    Selected by :func:`storage_type_from_version` according to the detected
    server capabilities: native JSON columns on 21c+ (with a compatible
    setting >= 20), BLOB-backed JSON on 12c+, and plain BLOB otherwise.
    """

    # Native JSON column type (Oracle 21c+, compatible >= 20).
    JSON_NATIVE = "json"
    # BLOB column holding JSON (Oracle 12c+); the recommended fallback.
    BLOB_JSON = "blob_json"
    # Plain BLOB with application-side JSON serialization (pre-12c).
    BLOB_PLAIN = "blob_plain"
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _coerce_decimal_values(value: Any) -> Any:
|
|
54
|
+
if isinstance(value, Decimal):
|
|
55
|
+
return float(value)
|
|
56
|
+
if isinstance(value, dict):
|
|
57
|
+
return {key: _coerce_decimal_values(val) for key, val in value.items()}
|
|
58
|
+
if isinstance(value, list):
|
|
59
|
+
return [_coerce_decimal_values(item) for item in value]
|
|
60
|
+
if isinstance(value, tuple):
|
|
61
|
+
return tuple(_coerce_decimal_values(item) for item in value)
|
|
62
|
+
if isinstance(value, set):
|
|
63
|
+
return {_coerce_decimal_values(item) for item in value}
|
|
64
|
+
if isinstance(value, frozenset):
|
|
65
|
+
return frozenset(_coerce_decimal_values(item) for item in value)
|
|
66
|
+
return value
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _storage_type_from_version(version_info: "OracleVersionInfo | None") -> JSONStorageType:
    """Determine JSON storage type based on Oracle version metadata.

    Falls back to BLOB_JSON when no version information is available, and
    otherwise picks the richest storage the server supports.
    """
    # Missing/falsy version info: we cannot tell what the server supports,
    # so default to the broadly-compatible BLOB_JSON strategy.
    if not version_info:
        logger.warning("Oracle version could not be detected; defaulting to BLOB_JSON storage")
        return JSONStorageType.BLOB_JSON

    if version_info.supports_native_json():
        logger.debug("Detected Oracle %s with compatible >= 20, using JSON_NATIVE", version_info)
        return JSONStorageType.JSON_NATIVE

    if version_info.supports_json_blob():
        logger.debug("Detected Oracle %s, using BLOB_JSON (recommended)", version_info)
        return JSONStorageType.BLOB_JSON

    logger.debug("Detected Oracle %s (pre-12c), using BLOB_PLAIN", version_info)
    return JSONStorageType.BLOB_PLAIN
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def coerce_decimal_values(value: Any) -> Any:
    """Public wrapper around :func:`_coerce_decimal_values`.

    Recursively converts ``Decimal`` values to ``float`` within nested
    containers; non-container, non-Decimal values pass through unchanged.
    """
    return _coerce_decimal_values(value)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def storage_type_from_version(version_info: "OracleVersionInfo | None") -> JSONStorageType:
    """Public wrapper around :func:`_storage_type_from_version`.

    Maps detected Oracle version metadata (or None) to the JSON storage
    strategy the store should use.
    """
    return _storage_type_from_version(version_info)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _to_oracle_bool(value: "bool | None") -> "int | None":
|
|
97
|
+
"""Convert Python boolean to Oracle NUMBER(1).
|
|
98
|
+
|
|
99
|
+
Args:
|
|
100
|
+
value: Python boolean value or None.
|
|
101
|
+
|
|
102
|
+
Returns:
|
|
103
|
+
1 for True, 0 for False, None for None.
|
|
104
|
+
"""
|
|
105
|
+
if value is None:
|
|
106
|
+
return None
|
|
107
|
+
return 1 if value else 0
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def _from_oracle_bool(value: "int | None") -> "bool | None":
|
|
111
|
+
"""Convert Oracle NUMBER(1) to Python boolean.
|
|
112
|
+
|
|
113
|
+
Args:
|
|
114
|
+
value: Oracle NUMBER value (0, 1, or None).
|
|
115
|
+
|
|
116
|
+
Returns:
|
|
117
|
+
Python boolean or None.
|
|
118
|
+
"""
|
|
119
|
+
if value is None:
|
|
120
|
+
return None
|
|
121
|
+
return bool(value)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _coerce_bytes_payload(value: Any) -> bytes:
|
|
125
|
+
"""Coerce a LOB payload into bytes."""
|
|
126
|
+
if value is None:
|
|
127
|
+
return b""
|
|
128
|
+
if isinstance(value, bytes):
|
|
129
|
+
return value
|
|
130
|
+
if isinstance(value, str):
|
|
131
|
+
return value.encode("utf-8")
|
|
132
|
+
return str(value).encode("utf-8")
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
class OracleAsyncADKStore(BaseAsyncADKStore["OracleAsyncConfig"]):
    """Oracle async ADK store using oracledb async driver.

    Implements session and event storage for Google Agent Development Kit
    using Oracle Database via the python-oracledb async driver. Provides:
    - Session state management with version-specific JSON storage
    - Event history tracking with BLOB-serialized actions
    - TIMESTAMP WITH TIME ZONE for timezone-aware timestamps
    - Foreign key constraints with cascade delete
    - Efficient upserts using MERGE statement

    Args:
        config: OracleAsyncConfig with extension_config["adk"] settings.

    Example:
        from sqlspec.adapters.oracledb import OracleAsyncConfig
        from sqlspec.adapters.oracledb.adk import OracleAsyncADKStore

        config = OracleAsyncConfig(
            connection_config={"dsn": "oracle://..."},
            extension_config={
                "adk": {
                    "session_table": "my_sessions",
                    "events_table": "my_events",
                    "owner_id_column": "tenant_id NUMBER(10) REFERENCES tenants(id)"
                }
            }
        )
        store = OracleAsyncADKStore(config)
        await store.ensure_tables()

    Notes:
        - JSON storage type detected based on Oracle version (21c+, 12c+, legacy)
        - BLOB for pre-serialized actions from Google ADK
        - TIMESTAMP WITH TIME ZONE for timezone-aware timestamps
        - NUMBER(1) for booleans (0/1/NULL)
        - Named parameters using :param_name
        - State merging handled at application level
        - owner_id_column supports NUMBER, VARCHAR2, RAW for Oracle FK types
        - Configuration is read from config.extension_config["adk"]
    """

    __slots__ = ("_in_memory", "_json_storage_type", "_oracle_version_info")

    def __init__(self, config: "OracleAsyncConfig") -> None:
        """Initialize Oracle ADK store.

        Args:
            config: OracleAsyncConfig instance.

        Notes:
            Configuration is read from config.extension_config["adk"]:
            - session_table: Sessions table name (default: "adk_sessions")
            - events_table: Events table name (default: "adk_events")
            - owner_id_column: Optional owner FK column DDL (default: None)
            - in_memory: Enable INMEMORY PRIORITY HIGH clause (default: False)
        """
        super().__init__(config)
        # Lazily populated caches; see _detect_json_storage_type / _get_version_info.
        self._json_storage_type: JSONStorageType | None = None
        self._oracle_version_info: OracleVersionInfo | None = None

        adk_config = config.extension_config.get("adk", {})
        self._in_memory: bool = bool(adk_config.get("in_memory", False))

    async def _get_create_sessions_table_sql(self) -> str:
        """Get Oracle CREATE TABLE SQL for sessions table.

        Auto-detects optimal JSON storage type based on Oracle version.
        Result is cached to minimize database queries.
        """
        storage_type = await self._detect_json_storage_type()
        return self._get_create_sessions_table_sql_for_type(storage_type)

    async def _get_create_events_table_sql(self) -> str:
        """Get Oracle CREATE TABLE SQL for events table.

        Auto-detects optimal JSON storage type based on Oracle version.
        Result is cached to minimize database queries.
        """
        storage_type = await self._detect_json_storage_type()
        return self._get_create_events_table_sql_for_type(storage_type)

    async def _detect_json_storage_type(self) -> JSONStorageType:
        """Detect the appropriate JSON storage type based on Oracle version.

        Returns:
            Appropriate JSONStorageType for this Oracle version.

        Notes:
            Queries product_component_version to determine Oracle version.
            - Oracle 21c+ with compatible >= 20: Native JSON type
            - Oracle 12c+: BLOB with IS JSON constraint (preferred)
            - Oracle 11g and earlier: BLOB without constraint

            BLOB is preferred over CLOB for 12c+ as per Oracle recommendations.
            Result is cached in self._json_storage_type.
        """
        if self._json_storage_type is not None:
            return self._json_storage_type

        version_info = await self._get_version_info()
        self._json_storage_type = _storage_type_from_version(version_info)
        return self._json_storage_type

    async def _get_version_info(self) -> "OracleVersionInfo | None":
        """Return cached Oracle version info using Oracle data dictionary."""

        if self._oracle_version_info is not None:
            return self._oracle_version_info

        async with self._config.provide_session() as driver:
            dictionary = OracledbAsyncDataDictionary()
            self._oracle_version_info = await dictionary.get_version(driver)

        if self._oracle_version_info is None:
            logger.warning("Could not detect Oracle version, defaulting to BLOB_JSON storage")

        return self._oracle_version_info

    async def _serialize_state(self, state: "dict[str, Any]") -> "str | bytes":
        """Serialize state dictionary to appropriate format based on storage type.

        Args:
            state: State dictionary to serialize.

        Returns:
            JSON string for JSON_NATIVE, bytes for BLOB types.
        """
        storage_type = await self._detect_json_storage_type()

        if storage_type == JSONStorageType.JSON_NATIVE:
            return to_json(state)

        return to_json(state, as_bytes=True)

    async def _deserialize_state(self, data: Any) -> "dict[str, Any]":
        """Deserialize state data from database format.

        Args:
            data: Data from database (may be LOB, str, bytes, or dict).

        Returns:
            Deserialized state dictionary.

        Notes:
            Handles LOB reading if data has read() method.
            Oracle JSON type may return dict directly.
        """
        # LOB objects are drained first so the isinstance checks below see
        # their concrete payload.
        if is_async_readable(data):
            data = await data.read()
        elif is_readable(data):
            data = data.read()

        if isinstance(data, dict):
            # Native JSON columns can yield Decimal numbers; normalize to float.
            return cast("dict[str, Any]", _coerce_decimal_values(data))

        if isinstance(data, bytes):
            return from_json(data)  # type: ignore[no-any-return]

        if isinstance(data, str):
            return from_json(data)  # type: ignore[no-any-return]

        return from_json(str(data))  # type: ignore[no-any-return]

    async def _serialize_json_field(self, value: Any) -> "str | bytes | None":
        """Serialize optional JSON field for event storage.

        Args:
            value: Value to serialize (dict or None).

        Returns:
            Serialized JSON or None.
        """
        if value is None:
            return None

        storage_type = await self._detect_json_storage_type()

        if storage_type == JSONStorageType.JSON_NATIVE:
            return to_json(value)

        return to_json(value, as_bytes=True)

    async def _deserialize_json_field(self, data: Any) -> "dict[str, Any] | None":
        """Deserialize optional JSON field from database.

        Args:
            data: Data from database (may be LOB, str, bytes, dict, or None).

        Returns:
            Deserialized dictionary or None.

        Notes:
            Oracle JSON type may return dict directly.
        """
        if data is None:
            return None

        if is_async_readable(data):
            data = await data.read()
        elif is_readable(data):
            data = data.read()

        if isinstance(data, dict):
            return cast("dict[str, Any]", _coerce_decimal_values(data))

        if isinstance(data, bytes):
            return from_json(data)  # type: ignore[no-any-return]

        if isinstance(data, str):
            return from_json(data)  # type: ignore[no-any-return]

        return from_json(str(data))  # type: ignore[no-any-return]

    def _get_create_sessions_table_sql_for_type(self, storage_type: JSONStorageType) -> str:
        """Get Oracle CREATE TABLE SQL for sessions with specified storage type.

        Args:
            storage_type: JSON storage type to use.

        Returns:
            SQL statement to create adk_sessions table.

        Notes:
            Each PL/SQL block swallows SQLCODE -955 ("name is already used")
            so table/index creation is idempotent.
        """
        if storage_type == JSONStorageType.JSON_NATIVE:
            state_column = "state JSON NOT NULL"
        elif storage_type == JSONStorageType.BLOB_JSON:
            state_column = "state BLOB CHECK (state IS JSON) NOT NULL"
        else:
            state_column = "state BLOB NOT NULL"

        owner_id_column_sql = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""

        return f"""
        BEGIN
            EXECUTE IMMEDIATE 'CREATE TABLE {self._session_table} (
                id VARCHAR2(128) PRIMARY KEY,
                app_name VARCHAR2(128) NOT NULL,
                user_id VARCHAR2(128) NOT NULL,
                {state_column},
                create_time TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL,
                update_time TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL{owner_id_column_sql}
            ){inmemory_clause}';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._session_table}_app_user
                ON {self._session_table}(app_name, user_id)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._session_table}_update_time
                ON {self._session_table}(update_time DESC)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;
        """

    def _get_create_events_table_sql_for_type(self, storage_type: JSONStorageType) -> str:
        """Get Oracle CREATE TABLE SQL for events with specified storage type.

        Args:
            storage_type: JSON storage type to use.

        Returns:
            SQL statement to create adk_events table.

        Notes:
            Each PL/SQL block swallows SQLCODE -955 ("name is already used")
            so table/index creation is idempotent.
        """
        if storage_type == JSONStorageType.JSON_NATIVE:
            json_columns = """
                content JSON,
                grounding_metadata JSON,
                custom_metadata JSON,
                long_running_tool_ids_json JSON
            """
        elif storage_type == JSONStorageType.BLOB_JSON:
            json_columns = """
                content BLOB CHECK (content IS JSON),
                grounding_metadata BLOB CHECK (grounding_metadata IS JSON),
                custom_metadata BLOB CHECK (custom_metadata IS JSON),
                long_running_tool_ids_json BLOB CHECK (long_running_tool_ids_json IS JSON)
            """
        else:
            json_columns = """
                content BLOB,
                grounding_metadata BLOB,
                custom_metadata BLOB,
                long_running_tool_ids_json BLOB
            """

        inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""

        return f"""
        BEGIN
            EXECUTE IMMEDIATE 'CREATE TABLE {self._events_table} (
                id VARCHAR2(128) PRIMARY KEY,
                session_id VARCHAR2(128) NOT NULL,
                app_name VARCHAR2(128) NOT NULL,
                user_id VARCHAR2(128) NOT NULL,
                invocation_id VARCHAR2(256),
                author VARCHAR2(256),
                actions BLOB,
                branch VARCHAR2(256),
                timestamp TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL,
                {json_columns},
                partial NUMBER(1),
                turn_complete NUMBER(1),
                interrupted NUMBER(1),
                error_code VARCHAR2(256),
                error_message VARCHAR2(1024),
                CONSTRAINT fk_{self._events_table}_session FOREIGN KEY (session_id)
                    REFERENCES {self._session_table}(id) ON DELETE CASCADE
            ){inmemory_clause}';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._events_table}_session
                ON {self._events_table}(session_id, timestamp ASC)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;
        """

    def _get_drop_tables_sql(self) -> "list[str]":
        """Get Oracle DROP TABLE SQL statements.

        Returns:
            List of SQL statements to drop tables and indexes.

        Notes:
            Order matters: drop events table (child) before sessions (parent).
            Oracle automatically drops indexes when dropping tables.
            SQLCODE -1418 (index does not exist) and -942 (table or view does
            not exist) are tolerated so drops are idempotent.
        """
        return [
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._events_table}_session';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._session_table}_update_time';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._session_table}_app_user';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP TABLE {self._events_table}';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -942 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP TABLE {self._session_table}';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -942 THEN
                        RAISE;
                    END IF;
            END;
            """,
        ]

    async def create_tables(self) -> None:
        """Create both sessions and events tables if they don't exist.

        Notes:
            Detects Oracle version to determine optimal JSON storage type.
            Uses version-appropriate table schema.
        """
        storage_type = await self._detect_json_storage_type()
        logger.debug("Creating ADK tables with storage type: %s", storage_type)

        async with self._config.provide_session() as driver:
            await driver.execute_script(self._get_create_sessions_table_sql_for_type(storage_type))

            await driver.execute_script(self._get_create_events_table_sql_for_type(storage_type))

    async def create_session(
        self, session_id: str, app_name: str, user_id: str, state: "dict[str, Any]", owner_id: "Any | None" = None
    ) -> SessionRecord:
        """Create a new session.

        Args:
            session_id: Unique session identifier.
            app_name: Application name.
            user_id: User identifier.
            state: Initial session state.
            owner_id: Optional owner ID value for owner_id_column (if configured).

        Returns:
            Created session record.

        Notes:
            Uses SYSTIMESTAMP for create_time and update_time.
            State is serialized using version-appropriate format.
            owner_id is ignored if owner_id_column not configured.
        """
        state_data = await self._serialize_state(state)

        if self._owner_id_column_name:
            sql = f"""
            INSERT INTO {self._session_table} (id, app_name, user_id, state, create_time, update_time, {self._owner_id_column_name})
            VALUES (:id, :app_name, :user_id, :state, SYSTIMESTAMP, SYSTIMESTAMP, :owner_id)
            """
            params = {
                "id": session_id,
                "app_name": app_name,
                "user_id": user_id,
                "state": state_data,
                "owner_id": owner_id,
            }
        else:
            sql = f"""
            INSERT INTO {self._session_table} (id, app_name, user_id, state, create_time, update_time)
            VALUES (:id, :app_name, :user_id, :state, SYSTIMESTAMP, SYSTIMESTAMP)
            """
            params = {"id": session_id, "app_name": app_name, "user_id": user_id, "state": state_data}

        async with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            await cursor.execute(sql, params)
            await conn.commit()

        # Re-read the row so the returned record carries the database-generated
        # create_time/update_time values.
        return await self.get_session(session_id)  # type: ignore[return-value]

    async def get_session(self, session_id: str) -> "SessionRecord | None":
        """Get session by ID.

        Args:
            session_id: Session identifier.

        Returns:
            Session record or None if not found.

        Notes:
            Oracle returns datetime objects for TIMESTAMP columns.
            State is deserialized using version-appropriate format.
            A missing table (ORA table-not-found) is treated as "no session".
        """

        try:
            async with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                await cursor.execute(
                    f"""
                    SELECT id, app_name, user_id, state, create_time, update_time
                    FROM {self._session_table}
                    WHERE id = :id
                    """,
                    {"id": session_id},
                )
                row = await cursor.fetchone()

                if row is None:
                    return None

                session_id_val, app_name, user_id, state_data, create_time, update_time = row

                state = await self._deserialize_state(state_data)

                return SessionRecord(
                    id=session_id_val,
                    app_name=app_name,
                    user_id=user_id,
                    state=state,
                    create_time=create_time,
                    update_time=update_time,
                )
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return None
            raise

    async def update_session_state(self, session_id: str, state: "dict[str, Any]") -> None:
        """Update session state.

        Args:
            session_id: Session identifier.
            state: New state dictionary (replaces existing state).

        Notes:
            This replaces the entire state dictionary.
            Updates update_time to current timestamp.
            State is serialized using version-appropriate format.
        """
        state_data = await self._serialize_state(state)

        sql = f"""
        UPDATE {self._session_table}
        SET state = :state, update_time = SYSTIMESTAMP
        WHERE id = :id
        """

        async with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            await cursor.execute(sql, {"state": state_data, "id": session_id})
            await conn.commit()

    async def delete_session(self, session_id: str) -> None:
        """Delete session and all associated events (cascade).

        Args:
            session_id: Session identifier.

        Notes:
            Foreign key constraint ensures events are cascade-deleted.
        """
        sql = f"DELETE FROM {self._session_table} WHERE id = :id"

        async with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            await cursor.execute(sql, {"id": session_id})
            await conn.commit()

    async def list_sessions(self, app_name: str, user_id: str | None = None) -> "list[SessionRecord]":
        """List sessions for an app, optionally filtered by user.

        Args:
            app_name: Application name.
            user_id: User identifier. If None, lists all sessions for the app.

        Returns:
            List of session records ordered by update_time DESC.

        Notes:
            Uses composite index on (app_name, user_id) when user_id is provided.
            State is deserialized using version-appropriate format.
            A missing table is treated as "no sessions".
        """

        if user_id is None:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = :app_name
            ORDER BY update_time DESC
            """
            params = {"app_name": app_name}
        else:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = :app_name AND user_id = :user_id
            ORDER BY update_time DESC
            """
            params = {"app_name": app_name, "user_id": user_id}

        try:
            async with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                await cursor.execute(sql, params)
                rows = await cursor.fetchall()

                results = []
                for row in rows:
                    state = await self._deserialize_state(row[3])

                    results.append(
                        SessionRecord(
                            id=row[0],
                            app_name=row[1],
                            user_id=row[2],
                            state=state,
                            create_time=row[4],
                            update_time=row[5],
                        )
                    )
                return results
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return []
            raise

    async def append_event(self, event_record: EventRecord) -> None:
        """Append an event to a session.

        Args:
            event_record: Event record to store.

        Notes:
            Uses SYSTIMESTAMP for timestamp if not provided.
            JSON fields are serialized using version-appropriate format.
            Boolean fields are converted to NUMBER(1).
        """
        content_data = await self._serialize_json_field(event_record.get("content"))
        grounding_metadata_data = await self._serialize_json_field(event_record.get("grounding_metadata"))
        custom_metadata_data = await self._serialize_json_field(event_record.get("custom_metadata"))

        sql = f"""
        INSERT INTO {self._events_table} (
            id, session_id, app_name, user_id, invocation_id, author, actions,
            long_running_tool_ids_json, branch, timestamp, content,
            grounding_metadata, custom_metadata, partial, turn_complete,
            interrupted, error_code, error_message
        ) VALUES (
            :id, :session_id, :app_name, :user_id, :invocation_id, :author, :actions,
            :long_running_tool_ids_json, :branch, :timestamp, :content,
            :grounding_metadata, :custom_metadata, :partial, :turn_complete,
            :interrupted, :error_code, :error_message
        )
        """

        async with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            await cursor.execute(
                sql,
                {
                    "id": event_record["id"],
                    "session_id": event_record["session_id"],
                    "app_name": event_record["app_name"],
                    "user_id": event_record["user_id"],
                    "invocation_id": event_record["invocation_id"],
                    "author": event_record["author"],
                    "actions": event_record["actions"],
                    "long_running_tool_ids_json": event_record.get("long_running_tool_ids_json"),
                    "branch": event_record.get("branch"),
                    "timestamp": event_record["timestamp"],
                    "content": content_data,
                    "grounding_metadata": grounding_metadata_data,
                    "custom_metadata": custom_metadata_data,
                    "partial": _to_oracle_bool(event_record.get("partial")),
                    "turn_complete": _to_oracle_bool(event_record.get("turn_complete")),
                    "interrupted": _to_oracle_bool(event_record.get("interrupted")),
                    "error_code": event_record.get("error_code"),
                    "error_message": event_record.get("error_message"),
                },
            )
            await conn.commit()

    async def get_events(
        self, session_id: str, after_timestamp: "datetime | None" = None, limit: "int | None" = None
    ) -> "list[EventRecord]":
        """Get events for a session.

        Args:
            session_id: Session identifier.
            after_timestamp: Only return events after this time.
            limit: Maximum number of events to return.

        Returns:
            List of event records ordered by timestamp ASC.

        Notes:
            Uses index on (session_id, timestamp ASC).
            JSON fields deserialized using version-appropriate format.
            Converts BLOB actions to bytes and NUMBER(1) booleans to Python bool.
        """

        where_clauses = ["session_id = :session_id"]
        params: dict[str, Any] = {"session_id": session_id}

        if after_timestamp is not None:
            where_clauses.append("timestamp > :after_timestamp")
            params["after_timestamp"] = after_timestamp

        where_clause = " AND ".join(where_clauses)
        limit_clause = ""
        # NOTE(review): a limit of 0 is falsy and therefore means "no limit"
        # here — confirm that is the intended contract.
        if limit:
            limit_clause = f" FETCH FIRST {limit} ROWS ONLY"

        sql = f"""
        SELECT id, session_id, app_name, user_id, invocation_id, author, actions,
               long_running_tool_ids_json, branch, timestamp, content,
               grounding_metadata, custom_metadata, partial, turn_complete,
               interrupted, error_code, error_message
        FROM {self._events_table}
        WHERE {where_clause}
        ORDER BY timestamp ASC{limit_clause}
        """

        try:
            async with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                await cursor.execute(sql, params)
                rows = await cursor.fetchall()

                results = []
                for row in rows:
                    # Row indices follow the SELECT column order above.
                    actions_blob = row[6]
                    if is_async_readable(actions_blob):
                        actions_data = await actions_blob.read()
                    elif is_readable(actions_blob):
                        actions_data = actions_blob.read()
                    else:
                        actions_data = actions_blob

                    content = await self._deserialize_json_field(row[10])
                    grounding_metadata = await self._deserialize_json_field(row[11])
                    custom_metadata = await self._deserialize_json_field(row[12])

                    results.append(
                        EventRecord(
                            id=row[0],
                            session_id=row[1],
                            app_name=row[2],
                            user_id=row[3],
                            invocation_id=row[4],
                            author=row[5],
                            actions=_coerce_bytes_payload(actions_data),
                            long_running_tool_ids_json=row[7],
                            branch=row[8],
                            timestamp=row[9],
                            content=content,
                            grounding_metadata=grounding_metadata,
                            custom_metadata=custom_metadata,
                            partial=_from_oracle_bool(row[13]),
                            turn_complete=_from_oracle_bool(row[14]),
                            interrupted=_from_oracle_bool(row[15]),
                            error_code=row[16],
                            error_message=row[17],
                        )
                    )
                return results
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return []
            raise
|
897
|
+
|
|
898
|
+
|
|
899
|
+
class OracleSyncADKStore(BaseSyncADKStore["OracleSyncConfig"]):
    """Oracle synchronous ADK store using oracledb sync driver.

    Implements session and event storage for Google Agent Development Kit
    using Oracle Database via the python-oracledb synchronous driver. Provides:
    - Session state management with version-specific JSON storage
    - Event history tracking with BLOB-serialized actions
    - TIMESTAMP WITH TIME ZONE for timezone-aware timestamps
    - Foreign key constraints with cascade delete
    - Efficient upserts using MERGE statement

    Args:
        config: OracleSyncConfig with extension_config["adk"] settings.

    Example:
        from sqlspec.adapters.oracledb import OracleSyncConfig
        from sqlspec.adapters.oracledb.adk import OracleSyncADKStore

        config = OracleSyncConfig(
            connection_config={"dsn": "oracle://..."},
            extension_config={
                "adk": {
                    "session_table": "my_sessions",
                    "events_table": "my_events",
                    "owner_id_column": "account_id NUMBER(19) REFERENCES accounts(id)"
                }
            }
        )
        store = OracleSyncADKStore(config)
        store.ensure_tables()

    Notes:
        - JSON storage type detected based on Oracle version (21c+, 12c+, legacy)
        - BLOB for pre-serialized actions from Google ADK
        - TIMESTAMP WITH TIME ZONE for timezone-aware timestamps
        - NUMBER(1) for booleans (0/1/NULL)
        - Named parameters using :param_name
        - State merging handled at application level
        - owner_id_column supports NUMBER, VARCHAR2, RAW for Oracle FK types
        - Configuration is read from config.extension_config["adk"]
    """

    # Only these attributes are added on top of the base store's slots.
    __slots__ = ("_in_memory", "_json_storage_type", "_oracle_version_info")

    def __init__(self, config: "OracleSyncConfig") -> None:
        """Initialize Oracle synchronous ADK store.

        Args:
            config: OracleSyncConfig instance.

        Notes:
            Configuration is read from config.extension_config["adk"]:
            - session_table: Sessions table name (default: "adk_sessions")
            - events_table: Events table name (default: "adk_events")
            - owner_id_column: Optional owner FK column DDL (default: None)
            - in_memory: Enable INMEMORY PRIORITY HIGH clause (default: False)
        """
        super().__init__(config)
        # Lazy caches: populated on first use by _detect_json_storage_type()
        # and _get_version_info() respectively.
        self._json_storage_type: JSONStorageType | None = None
        self._oracle_version_info: OracleVersionInfo | None = None

        adk_config = config.extension_config.get("adk", {})
        self._in_memory: bool = bool(adk_config.get("in_memory", False))
|
|
962
|
+
|
|
963
|
+
def _get_create_sessions_table_sql(self) -> str:
|
|
964
|
+
"""Get Oracle CREATE TABLE SQL for sessions table.
|
|
965
|
+
|
|
966
|
+
Auto-detects optimal JSON storage type based on Oracle version.
|
|
967
|
+
Result is cached to minimize database queries.
|
|
968
|
+
"""
|
|
969
|
+
storage_type = self._detect_json_storage_type()
|
|
970
|
+
return self._get_create_sessions_table_sql_for_type(storage_type)
|
|
971
|
+
|
|
972
|
+
def _get_create_events_table_sql(self) -> str:
|
|
973
|
+
"""Get Oracle CREATE TABLE SQL for events table.
|
|
974
|
+
|
|
975
|
+
Auto-detects optimal JSON storage type based on Oracle version.
|
|
976
|
+
Result is cached to minimize database queries.
|
|
977
|
+
"""
|
|
978
|
+
storage_type = self._detect_json_storage_type()
|
|
979
|
+
return self._get_create_events_table_sql_for_type(storage_type)
|
|
980
|
+
|
|
981
|
+
def _detect_json_storage_type(self) -> JSONStorageType:
|
|
982
|
+
"""Detect the appropriate JSON storage type based on Oracle version.
|
|
983
|
+
|
|
984
|
+
Returns:
|
|
985
|
+
Appropriate JSONStorageType for this Oracle version.
|
|
986
|
+
|
|
987
|
+
Notes:
|
|
988
|
+
Queries product_component_version to determine Oracle version.
|
|
989
|
+
- Oracle 21c+ with compatible >= 20: Native JSON type
|
|
990
|
+
- Oracle 12c+: BLOB with IS JSON constraint (preferred)
|
|
991
|
+
- Oracle 11g and earlier: BLOB without constraint
|
|
992
|
+
|
|
993
|
+
BLOB is preferred over CLOB for 12c+ as per Oracle recommendations.
|
|
994
|
+
Result is cached in self._json_storage_type.
|
|
995
|
+
"""
|
|
996
|
+
if self._json_storage_type is not None:
|
|
997
|
+
return self._json_storage_type
|
|
998
|
+
|
|
999
|
+
version_info = self._get_version_info()
|
|
1000
|
+
self._json_storage_type = _storage_type_from_version(version_info)
|
|
1001
|
+
return self._json_storage_type
|
|
1002
|
+
|
|
1003
|
+
def _get_version_info(self) -> "OracleVersionInfo | None":
|
|
1004
|
+
"""Return cached Oracle version info using Oracle data dictionary."""
|
|
1005
|
+
|
|
1006
|
+
if self._oracle_version_info is not None:
|
|
1007
|
+
return self._oracle_version_info
|
|
1008
|
+
|
|
1009
|
+
with self._config.provide_session() as driver:
|
|
1010
|
+
dictionary = OracledbSyncDataDictionary()
|
|
1011
|
+
self._oracle_version_info = dictionary.get_version(driver)
|
|
1012
|
+
|
|
1013
|
+
if self._oracle_version_info is None:
|
|
1014
|
+
logger.warning("Could not detect Oracle version, defaulting to BLOB_JSON storage")
|
|
1015
|
+
|
|
1016
|
+
return self._oracle_version_info
|
|
1017
|
+
|
|
1018
|
+
def _serialize_state(self, state: "dict[str, Any]") -> "str | bytes":
|
|
1019
|
+
"""Serialize state dictionary to appropriate format based on storage type.
|
|
1020
|
+
|
|
1021
|
+
Args:
|
|
1022
|
+
state: State dictionary to serialize.
|
|
1023
|
+
|
|
1024
|
+
Returns:
|
|
1025
|
+
JSON string for JSON_NATIVE, bytes for BLOB types.
|
|
1026
|
+
"""
|
|
1027
|
+
storage_type = self._detect_json_storage_type()
|
|
1028
|
+
|
|
1029
|
+
if storage_type == JSONStorageType.JSON_NATIVE:
|
|
1030
|
+
return to_json(state)
|
|
1031
|
+
|
|
1032
|
+
return to_json(state, as_bytes=True)
|
|
1033
|
+
|
|
1034
|
+
def _deserialize_state(self, data: Any) -> "dict[str, Any]":
|
|
1035
|
+
"""Deserialize state data from database format.
|
|
1036
|
+
|
|
1037
|
+
Args:
|
|
1038
|
+
data: Data from database (may be LOB, str, bytes, or dict).
|
|
1039
|
+
|
|
1040
|
+
Returns:
|
|
1041
|
+
Deserialized state dictionary.
|
|
1042
|
+
|
|
1043
|
+
Notes:
|
|
1044
|
+
Handles LOB reading if data has read() method.
|
|
1045
|
+
Oracle JSON type may return dict directly.
|
|
1046
|
+
"""
|
|
1047
|
+
if is_readable(data):
|
|
1048
|
+
data = data.read()
|
|
1049
|
+
|
|
1050
|
+
if isinstance(data, dict):
|
|
1051
|
+
return cast("dict[str, Any]", _coerce_decimal_values(data))
|
|
1052
|
+
|
|
1053
|
+
if isinstance(data, bytes):
|
|
1054
|
+
return from_json(data) # type: ignore[no-any-return]
|
|
1055
|
+
|
|
1056
|
+
if isinstance(data, str):
|
|
1057
|
+
return from_json(data) # type: ignore[no-any-return]
|
|
1058
|
+
|
|
1059
|
+
return from_json(str(data)) # type: ignore[no-any-return]
|
|
1060
|
+
|
|
1061
|
+
def _serialize_json_field(self, value: Any) -> "str | bytes | None":
|
|
1062
|
+
"""Serialize optional JSON field for event storage.
|
|
1063
|
+
|
|
1064
|
+
Args:
|
|
1065
|
+
value: Value to serialize (dict or None).
|
|
1066
|
+
|
|
1067
|
+
Returns:
|
|
1068
|
+
Serialized JSON or None.
|
|
1069
|
+
"""
|
|
1070
|
+
if value is None:
|
|
1071
|
+
return None
|
|
1072
|
+
|
|
1073
|
+
storage_type = self._detect_json_storage_type()
|
|
1074
|
+
|
|
1075
|
+
if storage_type == JSONStorageType.JSON_NATIVE:
|
|
1076
|
+
return to_json(value)
|
|
1077
|
+
|
|
1078
|
+
return to_json(value, as_bytes=True)
|
|
1079
|
+
|
|
1080
|
+
def _deserialize_json_field(self, data: Any) -> "dict[str, Any] | None":
|
|
1081
|
+
"""Deserialize optional JSON field from database.
|
|
1082
|
+
|
|
1083
|
+
Args:
|
|
1084
|
+
data: Data from database (may be LOB, str, bytes, dict, or None).
|
|
1085
|
+
|
|
1086
|
+
Returns:
|
|
1087
|
+
Deserialized dictionary or None.
|
|
1088
|
+
|
|
1089
|
+
Notes:
|
|
1090
|
+
Oracle JSON type may return dict directly.
|
|
1091
|
+
"""
|
|
1092
|
+
if data is None:
|
|
1093
|
+
return None
|
|
1094
|
+
|
|
1095
|
+
if is_readable(data):
|
|
1096
|
+
data = data.read()
|
|
1097
|
+
|
|
1098
|
+
if isinstance(data, dict):
|
|
1099
|
+
return cast("dict[str, Any]", _coerce_decimal_values(data))
|
|
1100
|
+
|
|
1101
|
+
if isinstance(data, bytes):
|
|
1102
|
+
return from_json(data) # type: ignore[no-any-return]
|
|
1103
|
+
|
|
1104
|
+
if isinstance(data, str):
|
|
1105
|
+
return from_json(data) # type: ignore[no-any-return]
|
|
1106
|
+
|
|
1107
|
+
return from_json(str(data)) # type: ignore[no-any-return]
|
|
1108
|
+
|
|
1109
|
+
    def _get_create_sessions_table_sql_for_type(self, storage_type: JSONStorageType) -> str:
        """Get Oracle CREATE TABLE SQL for sessions with specified storage type.

        Args:
            storage_type: JSON storage type to use.

        Returns:
            SQL statement to create adk_sessions table.

        Notes:
            Each DDL statement is wrapped in a PL/SQL block that ignores
            SQLCODE -955 (ORA-00955: name is already used by an existing
            object), making the script idempotent when the table or an
            index already exists.
        """
        if storage_type == JSONStorageType.JSON_NATIVE:
            state_column = "state JSON NOT NULL"
        elif storage_type == JSONStorageType.BLOB_JSON:
            state_column = "state BLOB CHECK (state IS JSON) NOT NULL"
        else:
            # Legacy servers: plain BLOB, no JSON validation at the DB level.
            state_column = "state BLOB NOT NULL"

        # Optional owner FK column DDL is appended verbatim after update_time.
        owner_id_column_sql = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""

        return f"""
        BEGIN
            EXECUTE IMMEDIATE 'CREATE TABLE {self._session_table} (
                id VARCHAR2(128) PRIMARY KEY,
                app_name VARCHAR2(128) NOT NULL,
                user_id VARCHAR2(128) NOT NULL,
                {state_column},
                create_time TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL,
                update_time TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL{owner_id_column_sql}
            ){inmemory_clause}';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._session_table}_app_user
                ON {self._session_table}(app_name, user_id)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._session_table}_update_time
                ON {self._session_table}(update_time DESC)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;
        """
|
|
1165
|
+
|
|
1166
|
+
    def _get_create_events_table_sql_for_type(self, storage_type: JSONStorageType) -> str:
        """Get Oracle CREATE TABLE SQL for events with specified storage type.

        Args:
            storage_type: JSON storage type to use.

        Returns:
            SQL statement to create adk_events table.

        Notes:
            Each DDL statement is wrapped in a PL/SQL block that ignores
            SQLCODE -955 (ORA-00955: name already used) so re-running the
            script is harmless. The FK to the sessions table uses
            ON DELETE CASCADE so deleting a session removes its events.
        """
        if storage_type == JSONStorageType.JSON_NATIVE:
            json_columns = """
                content JSON,
                grounding_metadata JSON,
                custom_metadata JSON,
                long_running_tool_ids_json JSON
            """
        elif storage_type == JSONStorageType.BLOB_JSON:
            json_columns = """
                content BLOB CHECK (content IS JSON),
                grounding_metadata BLOB CHECK (grounding_metadata IS JSON),
                custom_metadata BLOB CHECK (custom_metadata IS JSON),
                long_running_tool_ids_json BLOB CHECK (long_running_tool_ids_json IS JSON)
            """
        else:
            # Legacy servers: plain BLOBs, no IS JSON validation.
            json_columns = """
                content BLOB,
                grounding_metadata BLOB,
                custom_metadata BLOB,
                long_running_tool_ids_json BLOB
            """

        inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""

        return f"""
        BEGIN
            EXECUTE IMMEDIATE 'CREATE TABLE {self._events_table} (
                id VARCHAR2(128) PRIMARY KEY,
                session_id VARCHAR2(128) NOT NULL,
                app_name VARCHAR2(128) NOT NULL,
                user_id VARCHAR2(128) NOT NULL,
                invocation_id VARCHAR2(256),
                author VARCHAR2(256),
                actions BLOB,
                branch VARCHAR2(256),
                timestamp TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL,
                {json_columns},
                partial NUMBER(1),
                turn_complete NUMBER(1),
                interrupted NUMBER(1),
                error_code VARCHAR2(256),
                error_message VARCHAR2(1024),
                CONSTRAINT fk_{self._events_table}_session FOREIGN KEY (session_id)
                    REFERENCES {self._session_table}(id) ON DELETE CASCADE
            ){inmemory_clause}';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._events_table}_session
                ON {self._events_table}(session_id, timestamp ASC)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;
        """
|
|
1237
|
+
|
|
1238
|
+
    def _get_drop_tables_sql(self) -> "list[str]":
        """Get Oracle DROP TABLE SQL statements.

        Returns:
            List of SQL statements to drop tables and indexes.

        Notes:
            Order matters: drop events table (child) before sessions (parent).
            Oracle automatically drops indexes when dropping tables.
            Index drops ignore SQLCODE -1418 (ORA-01418: specified index does
            not exist); table drops ignore SQLCODE -942 (ORA-00942: table or
            view does not exist) so the teardown is idempotent.
        """
        return [
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._events_table}_session';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._session_table}_update_time';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._session_table}_app_user';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP TABLE {self._events_table}';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -942 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP TABLE {self._session_table}';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -942 THEN
                        RAISE;
                    END IF;
            END;
            """,
        ]
|
|
1300
|
+
|
|
1301
|
+
def create_tables(self) -> None:
|
|
1302
|
+
"""Create both sessions and events tables if they don't exist.
|
|
1303
|
+
|
|
1304
|
+
Notes:
|
|
1305
|
+
Detects Oracle version to determine optimal JSON storage type.
|
|
1306
|
+
Uses version-appropriate table schema.
|
|
1307
|
+
"""
|
|
1308
|
+
storage_type = self._detect_json_storage_type()
|
|
1309
|
+
logger.info("Creating ADK tables with storage type: %s", storage_type)
|
|
1310
|
+
|
|
1311
|
+
with self._config.provide_session() as driver:
|
|
1312
|
+
sessions_sql = SQL(self._get_create_sessions_table_sql_for_type(storage_type))
|
|
1313
|
+
driver.execute_script(sessions_sql)
|
|
1314
|
+
|
|
1315
|
+
events_sql = SQL(self._get_create_events_table_sql_for_type(storage_type))
|
|
1316
|
+
driver.execute_script(events_sql)
|
|
1317
|
+
|
|
1318
|
+
    def create_session(
        self, session_id: str, app_name: str, user_id: str, state: "dict[str, Any]", owner_id: "Any | None" = None
    ) -> SessionRecord:
        """Create a new session.

        Args:
            session_id: Unique session identifier.
            app_name: Application name.
            user_id: User identifier.
            state: Initial session state.
            owner_id: Optional owner ID value for owner_id_column (if configured).

        Returns:
            Created session record.

        Notes:
            Uses SYSTIMESTAMP for create_time and update_time.
            State is serialized using version-appropriate format.
            owner_id is ignored if owner_id_column not configured.
        """
        state_data = self._serialize_state(state)

        # Two INSERT shapes: with or without the optional owner FK column.
        if self._owner_id_column_name:
            sql = f"""
            INSERT INTO {self._session_table} (id, app_name, user_id, state, create_time, update_time, {self._owner_id_column_name})
            VALUES (:id, :app_name, :user_id, :state, SYSTIMESTAMP, SYSTIMESTAMP, :owner_id)
            """
            params = {
                "id": session_id,
                "app_name": app_name,
                "user_id": user_id,
                "state": state_data,
                "owner_id": owner_id,
            }
        else:
            sql = f"""
            INSERT INTO {self._session_table} (id, app_name, user_id, state, create_time, update_time)
            VALUES (:id, :app_name, :user_id, :state, SYSTIMESTAMP, SYSTIMESTAMP)
            """
            params = {"id": session_id, "app_name": app_name, "user_id": user_id, "state": state_data}

        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, params)
            conn.commit()

        # Re-read the row so the returned record carries the database-assigned
        # SYSTIMESTAMP values rather than client-side timestamps.
        return self.get_session(session_id)  # type: ignore[return-value]
|
|
1365
|
+
|
|
1366
|
+
    def get_session(self, session_id: str) -> "SessionRecord | None":
        """Get session by ID.

        Args:
            session_id: Session identifier.

        Returns:
            Session record or None if not found (or if the sessions table
            does not exist yet).

        Notes:
            Oracle returns datetime objects for TIMESTAMP columns.
            State is deserialized using version-appropriate format.
        """

        sql = f"""
        SELECT id, app_name, user_id, state, create_time, update_time
        FROM {self._session_table}
        WHERE id = :id
        """

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                cursor.execute(sql, {"id": session_id})
                row = cursor.fetchone()

                if row is None:
                    return None

                session_id_val, app_name, user_id, state_data, create_time, update_time = row

                # Deserialize while the connection is open: state_data may be
                # a LOB handle that is read inside _deserialize_state().
                state = self._deserialize_state(state_data)

                return SessionRecord(
                    id=session_id_val,
                    app_name=app_name,
                    user_id=user_id,
                    state=state,
                    create_time=create_time,
                    update_time=update_time,
                )
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            # Table not created yet -> treat as "session not found".
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return None
            raise
|
|
1412
|
+
|
|
1413
|
+
def update_session_state(self, session_id: str, state: "dict[str, Any]") -> None:
|
|
1414
|
+
"""Update session state.
|
|
1415
|
+
|
|
1416
|
+
Args:
|
|
1417
|
+
session_id: Session identifier.
|
|
1418
|
+
state: New state dictionary (replaces existing state).
|
|
1419
|
+
|
|
1420
|
+
Notes:
|
|
1421
|
+
This replaces the entire state dictionary.
|
|
1422
|
+
Updates update_time to current timestamp.
|
|
1423
|
+
State is serialized using version-appropriate format.
|
|
1424
|
+
"""
|
|
1425
|
+
state_data = self._serialize_state(state)
|
|
1426
|
+
|
|
1427
|
+
sql = f"""
|
|
1428
|
+
UPDATE {self._session_table}
|
|
1429
|
+
SET state = :state, update_time = SYSTIMESTAMP
|
|
1430
|
+
WHERE id = :id
|
|
1431
|
+
"""
|
|
1432
|
+
|
|
1433
|
+
with self._config.provide_connection() as conn:
|
|
1434
|
+
cursor = conn.cursor()
|
|
1435
|
+
cursor.execute(sql, {"state": state_data, "id": session_id})
|
|
1436
|
+
conn.commit()
|
|
1437
|
+
|
|
1438
|
+
def delete_session(self, session_id: str) -> None:
|
|
1439
|
+
"""Delete session and all associated events (cascade).
|
|
1440
|
+
|
|
1441
|
+
Args:
|
|
1442
|
+
session_id: Session identifier.
|
|
1443
|
+
|
|
1444
|
+
Notes:
|
|
1445
|
+
Foreign key constraint ensures events are cascade-deleted.
|
|
1446
|
+
"""
|
|
1447
|
+
sql = f"DELETE FROM {self._session_table} WHERE id = :id"
|
|
1448
|
+
|
|
1449
|
+
with self._config.provide_connection() as conn:
|
|
1450
|
+
cursor = conn.cursor()
|
|
1451
|
+
cursor.execute(sql, {"id": session_id})
|
|
1452
|
+
conn.commit()
|
|
1453
|
+
|
|
1454
|
+
    def list_sessions(self, app_name: str, user_id: str | None = None) -> "list[SessionRecord]":
        """List sessions for an app, optionally filtered by user.

        Args:
            app_name: Application name.
            user_id: User identifier. If None, lists all sessions for the app.

        Returns:
            List of session records ordered by update_time DESC. Empty list
            when the sessions table does not exist yet.

        Notes:
            Uses composite index on (app_name, user_id) when user_id is provided.
            State is deserialized using version-appropriate format.
        """

        if user_id is None:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = :app_name
            ORDER BY update_time DESC
            """
            params = {"app_name": app_name}
        else:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = :app_name AND user_id = :user_id
            ORDER BY update_time DESC
            """
            params = {"app_name": app_name, "user_id": user_id}

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                cursor.execute(sql, params)
                rows = cursor.fetchall()

                results = []
                for row in rows:
                    # Deserialize while the connection is open (row[3] may be
                    # a LOB handle read inside _deserialize_state()).
                    state = self._deserialize_state(row[3])

                    results.append(
                        SessionRecord(
                            id=row[0],
                            app_name=row[1],
                            user_id=row[2],
                            state=state,
                            create_time=row[4],
                            update_time=row[5],
                        )
                    )
                return results
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            # Table not created yet -> behave as "no sessions".
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return []
            raise
|
|
1512
|
+
|
|
1513
|
+
    def create_event(
        self,
        event_id: str,
        session_id: str,
        app_name: str,
        user_id: str,
        author: "str | None" = None,
        actions: "bytes | None" = None,
        content: "dict[str, Any] | None" = None,
        **kwargs: Any,
    ) -> "EventRecord":
        """Create a new event.

        Args:
            event_id: Unique event identifier.
            session_id: Session identifier.
            app_name: Application name.
            user_id: User identifier.
            author: Event author (user/assistant/system).
            actions: Pickled actions object (stored as BLOB).
            content: Event content (JSONB/JSON).
            **kwargs: Additional optional fields: invocation_id,
                long_running_tool_ids_json, branch, timestamp,
                grounding_metadata, custom_metadata, partial,
                turn_complete, interrupted, error_code, error_message.

        Returns:
            Created event record.

        Raises:
            RuntimeError: If the inserted event cannot be read back.

        Notes:
            JSON fields are serialized using version-appropriate format.
            Boolean fields are converted to NUMBER(1) (0/1/NULL).
        """
        content_data = self._serialize_json_field(content)
        grounding_metadata_data = self._serialize_json_field(kwargs.get("grounding_metadata"))
        custom_metadata_data = self._serialize_json_field(kwargs.get("custom_metadata"))

        sql = f"""
        INSERT INTO {self._events_table} (
            id, session_id, app_name, user_id, invocation_id, author, actions,
            long_running_tool_ids_json, branch, timestamp, content,
            grounding_metadata, custom_metadata, partial, turn_complete,
            interrupted, error_code, error_message
        ) VALUES (
            :id, :session_id, :app_name, :user_id, :invocation_id, :author, :actions,
            :long_running_tool_ids_json, :branch, :timestamp, :content,
            :grounding_metadata, :custom_metadata, :partial, :turn_complete,
            :interrupted, :error_code, :error_message
        )
        """

        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(
                sql,
                {
                    "id": event_id,
                    "session_id": session_id,
                    "app_name": app_name,
                    "user_id": user_id,
                    "invocation_id": kwargs.get("invocation_id"),
                    "author": author,
                    "actions": actions,
                    "long_running_tool_ids_json": kwargs.get("long_running_tool_ids_json"),
                    "branch": kwargs.get("branch"),
                    "timestamp": kwargs.get("timestamp"),
                    "content": content_data,
                    "grounding_metadata": grounding_metadata_data,
                    "custom_metadata": custom_metadata_data,
                    # Optional tri-state booleans map to NUMBER(1)/NULL.
                    "partial": _to_oracle_bool(kwargs.get("partial")),
                    "turn_complete": _to_oracle_bool(kwargs.get("turn_complete")),
                    "interrupted": _to_oracle_bool(kwargs.get("interrupted")),
                    "error_code": kwargs.get("error_code"),
                    "error_message": kwargs.get("error_message"),
                },
            )
            conn.commit()

        # Read the event back via list_events so the returned record carries
        # database-assigned values (e.g. the default SYSTIMESTAMP timestamp).
        events = self.list_events(session_id)
        for event in events:
            if event["id"] == event_id:
                return event

        msg = f"Failed to retrieve created event {event_id}"
        raise RuntimeError(msg)
|
|
1596
|
+
|
|
1597
|
+
    def list_events(self, session_id: str) -> "list[EventRecord]":
        """List events for a session ordered by timestamp.

        Args:
            session_id: Session identifier.

        Returns:
            List of event records ordered by timestamp ASC. Empty list when
            the events table does not exist yet.

        Notes:
            Uses index on (session_id, timestamp ASC).
            JSON fields deserialized using version-appropriate format.
            Converts BLOB actions to bytes and NUMBER(1) booleans to Python bool.
        """

        sql = f"""
        SELECT id, session_id, app_name, user_id, invocation_id, author, actions,
               long_running_tool_ids_json, branch, timestamp, content,
               grounding_metadata, custom_metadata, partial, turn_complete,
               interrupted, error_code, error_message
        FROM {self._events_table}
        WHERE session_id = :session_id
        ORDER BY timestamp ASC
        """

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                cursor.execute(sql, {"session_id": session_id})
                rows = cursor.fetchall()

                results = []
                for row in rows:
                    # Column 6 is the actions BLOB; read LOB handles eagerly
                    # while the connection is still open.
                    actions_blob = row[6]
                    actions_data = actions_blob.read() if is_readable(actions_blob) else actions_blob

                    content = self._deserialize_json_field(row[10])
                    grounding_metadata = self._deserialize_json_field(row[11])
                    custom_metadata = self._deserialize_json_field(row[12])

                    results.append(
                        EventRecord(
                            id=row[0],
                            session_id=row[1],
                            app_name=row[2],
                            user_id=row[3],
                            invocation_id=row[4],
                            author=row[5],
                            actions=_coerce_bytes_payload(actions_data),
                            long_running_tool_ids_json=row[7],
                            branch=row[8],
                            timestamp=row[9],
                            content=content,
                            grounding_metadata=grounding_metadata,
                            custom_metadata=custom_metadata,
                            partial=_from_oracle_bool(row[13]),
                            turn_complete=_from_oracle_bool(row[14]),
                            interrupted=_from_oracle_bool(row[15]),
                            error_code=row[16],
                            error_message=row[17],
                        )
                    )
                return results
        except oracledb.DatabaseError as e:
            error_obj = e.args[0] if e.args else None
            # Table not created yet -> behave as "no events".
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return []
            raise
|
|
1665
|
+
|
|
1666
|
+
|
|
1667
|
+
# ORA-00001: unique constraint violated (duplicate primary/unique key).
ORACLE_DUPLICATE_KEY_ERROR: Final = 1
|
|
1668
|
+
|
|
1669
|
+
|
|
1670
|
+
def _extract_json_value(data: Any) -> "dict[str, Any]":
    """Parse an already-read JSON payload into a dict.

    Accepts dicts returned directly by Oracle's native JSON type,
    str/bytes JSON payloads, or any other value (stringified first).

    NOTE(review): this calls coerce_decimal_values, while the store
    deserializers above call _coerce_decimal_values — confirm both helpers
    exist or unify the naming.
    """
    if isinstance(data, dict):
        return cast("dict[str, Any]", coerce_decimal_values(data))
    if isinstance(data, bytes):
        return from_json(data)  # type: ignore[no-any-return]
    if isinstance(data, str):
        return from_json(data)  # type: ignore[no-any-return]
    return from_json(str(data))  # type: ignore[no-any-return]
|
|
1678
|
+
|
|
1679
|
+
|
|
1680
|
+
async def _read_lob_async(data: Any) -> Any:
    """Read LOB content, awaiting async LOB handles.

    Non-LOB values (no read() method) are returned unchanged, so callers
    may pass any column value through this helper.
    """
    if is_async_readable(data):
        return await data.read()
    return data.read() if is_readable(data) else data
|
|
1686
|
+
|
|
1687
|
+
|
|
1688
|
+
def _read_lob_sync(data: Any) -> Any:
    """Read synchronous LOB content; non-LOB values pass through unchanged."""
    return data.read() if is_readable(data) else data
|
|
1692
|
+
|
|
1693
|
+
|
|
1694
|
+
class OracleAsyncADKMemoryStore(BaseAsyncADKMemoryStore["OracleAsyncConfig"]):
|
|
1695
|
+
"""Oracle ADK memory store using async oracledb driver."""
|
|
1696
|
+
|
|
1697
|
+
__slots__ = ("_in_memory", "_json_storage_type", "_oracle_version_info")
|
|
1698
|
+
|
|
1699
|
+
    def __init__(self, config: "OracleAsyncConfig") -> None:
        """Initialize the async memory store.

        Reads extension_config["adk"]["in_memory"] to toggle the INMEMORY
        table clause; version and storage-type caches start empty and are
        filled lazily on first use.
        """
        super().__init__(config)
        self._json_storage_type: JSONStorageType | None = None
        self._oracle_version_info: OracleVersionInfo | None = None
        adk_config = config.extension_config.get("adk", {})
        self._in_memory: bool = bool(adk_config.get("in_memory", False))
|
|
1705
|
+
|
|
1706
|
+
    async def _detect_json_storage_type(self) -> "JSONStorageType":
        """Resolve and cache the JSON storage type for this Oracle server."""
        if self._json_storage_type is not None:
            return self._json_storage_type

        version_info = await self._get_version_info()
        # NOTE(review): uses storage_type_from_version while the sync store
        # uses _storage_type_from_version — confirm both helpers exist or
        # unify the naming.
        self._json_storage_type = storage_type_from_version(version_info)
        return self._json_storage_type
|
|
1713
|
+
|
|
1714
|
+
async def _get_version_info(self) -> "OracleVersionInfo | None":
|
|
1715
|
+
if self._oracle_version_info is not None:
|
|
1716
|
+
return self._oracle_version_info
|
|
1717
|
+
|
|
1718
|
+
async with self._config.provide_session() as driver:
|
|
1719
|
+
dictionary = OracledbAsyncDataDictionary()
|
|
1720
|
+
self._oracle_version_info = await dictionary.get_version(driver)
|
|
1721
|
+
|
|
1722
|
+
if self._oracle_version_info is None:
|
|
1723
|
+
logger.warning("Could not detect Oracle version, defaulting to BLOB_JSON storage")
|
|
1724
|
+
|
|
1725
|
+
return self._oracle_version_info
|
|
1726
|
+
|
|
1727
|
+
async def _serialize_json_field(self, value: Any) -> "str | bytes | None":
|
|
1728
|
+
if value is None:
|
|
1729
|
+
return None
|
|
1730
|
+
|
|
1731
|
+
storage_type = await self._detect_json_storage_type()
|
|
1732
|
+
if storage_type == JSONStorageType.JSON_NATIVE:
|
|
1733
|
+
return to_json(value)
|
|
1734
|
+
return to_json(value, as_bytes=True)
|
|
1735
|
+
|
|
1736
|
+
async def _deserialize_json_field(self, data: Any) -> "dict[str, Any] | None":
|
|
1737
|
+
if data is None:
|
|
1738
|
+
return None
|
|
1739
|
+
|
|
1740
|
+
if is_async_readable(data) or is_readable(data):
|
|
1741
|
+
data = await _read_lob_async(data)
|
|
1742
|
+
|
|
1743
|
+
return _extract_json_value(data)
|
|
1744
|
+
|
|
1745
|
+
async def _get_create_memory_table_sql(self) -> str:
|
|
1746
|
+
storage_type = await self._detect_json_storage_type()
|
|
1747
|
+
return self._get_create_memory_table_sql_for_type(storage_type)
|
|
1748
|
+
|
|
1749
|
+
def _get_create_memory_table_sql_for_type(self, storage_type: "JSONStorageType") -> str:
|
|
1750
|
+
if storage_type == JSONStorageType.JSON_NATIVE:
|
|
1751
|
+
json_columns = """
|
|
1752
|
+
content_json JSON,
|
|
1753
|
+
metadata_json JSON
|
|
1754
|
+
"""
|
|
1755
|
+
elif storage_type == JSONStorageType.BLOB_JSON:
|
|
1756
|
+
json_columns = """
|
|
1757
|
+
content_json BLOB CHECK (content_json IS JSON),
|
|
1758
|
+
metadata_json BLOB CHECK (metadata_json IS JSON)
|
|
1759
|
+
"""
|
|
1760
|
+
else:
|
|
1761
|
+
json_columns = """
|
|
1762
|
+
content_json BLOB,
|
|
1763
|
+
metadata_json BLOB
|
|
1764
|
+
"""
|
|
1765
|
+
|
|
1766
|
+
owner_id_line = f",\n {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
|
|
1767
|
+
inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""
|
|
1768
|
+
|
|
1769
|
+
fts_index = ""
|
|
1770
|
+
if self._use_fts:
|
|
1771
|
+
fts_index = f"""
|
|
1772
|
+
BEGIN
|
|
1773
|
+
EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_fts
|
|
1774
|
+
ON {self._memory_table}(content_text) INDEXTYPE IS CTXSYS.CONTEXT';
|
|
1775
|
+
EXCEPTION
|
|
1776
|
+
WHEN OTHERS THEN
|
|
1777
|
+
IF SQLCODE != -955 THEN
|
|
1778
|
+
RAISE;
|
|
1779
|
+
END IF;
|
|
1780
|
+
END;
|
|
1781
|
+
"""
|
|
1782
|
+
|
|
1783
|
+
return f"""
|
|
1784
|
+
BEGIN
|
|
1785
|
+
EXECUTE IMMEDIATE 'CREATE TABLE {self._memory_table} (
|
|
1786
|
+
id VARCHAR2(128) PRIMARY KEY,
|
|
1787
|
+
session_id VARCHAR2(128) NOT NULL,
|
|
1788
|
+
app_name VARCHAR2(128) NOT NULL,
|
|
1789
|
+
user_id VARCHAR2(128) NOT NULL,
|
|
1790
|
+
event_id VARCHAR2(128) NOT NULL UNIQUE,
|
|
1791
|
+
author VARCHAR2(256){owner_id_line},
|
|
1792
|
+
timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
|
|
1793
|
+
{json_columns},
|
|
1794
|
+
content_text CLOB NOT NULL,
|
|
1795
|
+
inserted_at TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL
|
|
1796
|
+
){inmemory_clause}';
|
|
1797
|
+
EXCEPTION
|
|
1798
|
+
WHEN OTHERS THEN
|
|
1799
|
+
IF SQLCODE != -955 THEN
|
|
1800
|
+
RAISE;
|
|
1801
|
+
END IF;
|
|
1802
|
+
END;
|
|
1803
|
+
|
|
1804
|
+
BEGIN
|
|
1805
|
+
EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_app_user_time
|
|
1806
|
+
ON {self._memory_table}(app_name, user_id, timestamp DESC)';
|
|
1807
|
+
EXCEPTION
|
|
1808
|
+
WHEN OTHERS THEN
|
|
1809
|
+
IF SQLCODE != -955 THEN
|
|
1810
|
+
RAISE;
|
|
1811
|
+
END IF;
|
|
1812
|
+
END;
|
|
1813
|
+
|
|
1814
|
+
BEGIN
|
|
1815
|
+
EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_session
|
|
1816
|
+
ON {self._memory_table}(session_id)';
|
|
1817
|
+
EXCEPTION
|
|
1818
|
+
WHEN OTHERS THEN
|
|
1819
|
+
IF SQLCODE != -955 THEN
|
|
1820
|
+
RAISE;
|
|
1821
|
+
END IF;
|
|
1822
|
+
END;
|
|
1823
|
+
{fts_index}
|
|
1824
|
+
"""
|
|
1825
|
+
|
|
1826
|
+
def _get_drop_memory_table_sql(self) -> "list[str]":
|
|
1827
|
+
return [
|
|
1828
|
+
f"""
|
|
1829
|
+
BEGIN
|
|
1830
|
+
EXECUTE IMMEDIATE 'DROP INDEX idx_{self._memory_table}_session';
|
|
1831
|
+
EXCEPTION
|
|
1832
|
+
WHEN OTHERS THEN
|
|
1833
|
+
IF SQLCODE != -1418 THEN
|
|
1834
|
+
RAISE;
|
|
1835
|
+
END IF;
|
|
1836
|
+
END;
|
|
1837
|
+
""",
|
|
1838
|
+
f"""
|
|
1839
|
+
BEGIN
|
|
1840
|
+
EXECUTE IMMEDIATE 'DROP INDEX idx_{self._memory_table}_app_user_time';
|
|
1841
|
+
EXCEPTION
|
|
1842
|
+
WHEN OTHERS THEN
|
|
1843
|
+
IF SQLCODE != -1418 THEN
|
|
1844
|
+
RAISE;
|
|
1845
|
+
END IF;
|
|
1846
|
+
END;
|
|
1847
|
+
""",
|
|
1848
|
+
f"""
|
|
1849
|
+
BEGIN
|
|
1850
|
+
EXECUTE IMMEDIATE 'DROP TABLE {self._memory_table}';
|
|
1851
|
+
EXCEPTION
|
|
1852
|
+
WHEN OTHERS THEN
|
|
1853
|
+
IF SQLCODE != -942 THEN
|
|
1854
|
+
RAISE;
|
|
1855
|
+
END IF;
|
|
1856
|
+
END;
|
|
1857
|
+
""",
|
|
1858
|
+
]
|
|
1859
|
+
|
|
1860
|
+
async def create_tables(self) -> None:
|
|
1861
|
+
if not self._enabled:
|
|
1862
|
+
return
|
|
1863
|
+
|
|
1864
|
+
async with self._config.provide_session() as driver:
|
|
1865
|
+
await driver.execute_script(await self._get_create_memory_table_sql())
|
|
1866
|
+
|
|
1867
|
+
async def _execute_insert_entry(self, cursor: Any, sql: str, params: "dict[str, Any]") -> bool:
|
|
1868
|
+
"""Execute an insert and skip duplicate key errors."""
|
|
1869
|
+
try:
|
|
1870
|
+
await cursor.execute(sql, params)
|
|
1871
|
+
except oracledb.DatabaseError as exc:
|
|
1872
|
+
error_obj = exc.args[0] if exc.args else None
|
|
1873
|
+
if error_obj and error_obj.code == ORACLE_DUPLICATE_KEY_ERROR:
|
|
1874
|
+
return False
|
|
1875
|
+
raise
|
|
1876
|
+
return True
|
|
1877
|
+
|
|
1878
|
+
async def insert_memory_entries(self, entries: "list[MemoryRecord]", owner_id: "object | None" = None) -> int:
|
|
1879
|
+
if not self._enabled:
|
|
1880
|
+
msg = "Memory store is disabled"
|
|
1881
|
+
raise RuntimeError(msg)
|
|
1882
|
+
|
|
1883
|
+
if not entries:
|
|
1884
|
+
return 0
|
|
1885
|
+
|
|
1886
|
+
owner_column = f", {self._owner_id_column_name}" if self._owner_id_column_name else ""
|
|
1887
|
+
owner_param = ", :owner_id" if self._owner_id_column_name else ""
|
|
1888
|
+
sql = f"""
|
|
1889
|
+
INSERT INTO {self._memory_table} (
|
|
1890
|
+
id, session_id, app_name, user_id, event_id, author{owner_column},
|
|
1891
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1892
|
+
) VALUES (
|
|
1893
|
+
:id, :session_id, :app_name, :user_id, :event_id, :author{owner_param},
|
|
1894
|
+
:timestamp, :content_json, :content_text, :metadata_json, :inserted_at
|
|
1895
|
+
)
|
|
1896
|
+
"""
|
|
1897
|
+
|
|
1898
|
+
inserted_count = 0
|
|
1899
|
+
async with self._config.provide_connection() as conn:
|
|
1900
|
+
cursor = conn.cursor()
|
|
1901
|
+
for entry in entries:
|
|
1902
|
+
content_json = await self._serialize_json_field(entry["content_json"])
|
|
1903
|
+
metadata_json = await self._serialize_json_field(entry["metadata_json"])
|
|
1904
|
+
params = {
|
|
1905
|
+
"id": entry["id"],
|
|
1906
|
+
"session_id": entry["session_id"],
|
|
1907
|
+
"app_name": entry["app_name"],
|
|
1908
|
+
"user_id": entry["user_id"],
|
|
1909
|
+
"event_id": entry["event_id"],
|
|
1910
|
+
"author": entry["author"],
|
|
1911
|
+
"timestamp": entry["timestamp"],
|
|
1912
|
+
"content_json": content_json,
|
|
1913
|
+
"content_text": entry["content_text"],
|
|
1914
|
+
"metadata_json": metadata_json,
|
|
1915
|
+
"inserted_at": entry["inserted_at"],
|
|
1916
|
+
}
|
|
1917
|
+
if self._owner_id_column_name:
|
|
1918
|
+
params["owner_id"] = str(owner_id) if owner_id is not None else None
|
|
1919
|
+
if await self._execute_insert_entry(cursor, sql, params):
|
|
1920
|
+
inserted_count += 1
|
|
1921
|
+
await conn.commit()
|
|
1922
|
+
|
|
1923
|
+
return inserted_count
|
|
1924
|
+
|
|
1925
|
+
async def search_entries(
|
|
1926
|
+
self, query: str, app_name: str, user_id: str, limit: "int | None" = None
|
|
1927
|
+
) -> "list[MemoryRecord]":
|
|
1928
|
+
if not self._enabled:
|
|
1929
|
+
msg = "Memory store is disabled"
|
|
1930
|
+
raise RuntimeError(msg)
|
|
1931
|
+
|
|
1932
|
+
effective_limit = limit if limit is not None else self._max_results
|
|
1933
|
+
|
|
1934
|
+
try:
|
|
1935
|
+
if self._use_fts:
|
|
1936
|
+
return await self._search_entries_fts(query, app_name, user_id, effective_limit)
|
|
1937
|
+
return await self._search_entries_simple(query, app_name, user_id, effective_limit)
|
|
1938
|
+
except oracledb.DatabaseError as exc:
|
|
1939
|
+
error_obj = exc.args[0] if exc.args else None
|
|
1940
|
+
if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
|
|
1941
|
+
return []
|
|
1942
|
+
raise
|
|
1943
|
+
|
|
1944
|
+
async def _search_entries_fts(self, query: str, app_name: str, user_id: str, limit: int) -> "list[MemoryRecord]":
|
|
1945
|
+
sql = f"""
|
|
1946
|
+
SELECT id, session_id, app_name, user_id, event_id, author,
|
|
1947
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1948
|
+
FROM (
|
|
1949
|
+
SELECT id, session_id, app_name, user_id, event_id, author,
|
|
1950
|
+
timestamp, content_json, content_text, metadata_json, inserted_at,
|
|
1951
|
+
SCORE(1) AS score
|
|
1952
|
+
FROM {self._memory_table}
|
|
1953
|
+
WHERE app_name = :app_name
|
|
1954
|
+
AND user_id = :user_id
|
|
1955
|
+
AND CONTAINS(content_text, :query, 1) > 0
|
|
1956
|
+
ORDER BY score DESC, timestamp DESC
|
|
1957
|
+
)
|
|
1958
|
+
WHERE ROWNUM <= :limit
|
|
1959
|
+
"""
|
|
1960
|
+
params = {"app_name": app_name, "user_id": user_id, "query": query, "limit": limit}
|
|
1961
|
+
async with self._config.provide_connection() as conn:
|
|
1962
|
+
cursor = conn.cursor()
|
|
1963
|
+
await cursor.execute(sql, params)
|
|
1964
|
+
rows = await cursor.fetchall()
|
|
1965
|
+
return await self._rows_to_records(rows)
|
|
1966
|
+
|
|
1967
|
+
async def _search_entries_simple(self, query: str, app_name: str, user_id: str, limit: int) -> "list[MemoryRecord]":
|
|
1968
|
+
sql = f"""
|
|
1969
|
+
SELECT id, session_id, app_name, user_id, event_id, author,
|
|
1970
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1971
|
+
FROM (
|
|
1972
|
+
SELECT id, session_id, app_name, user_id, event_id, author,
|
|
1973
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1974
|
+
FROM {self._memory_table}
|
|
1975
|
+
WHERE app_name = :app_name
|
|
1976
|
+
AND user_id = :user_id
|
|
1977
|
+
AND LOWER(content_text) LIKE :pattern
|
|
1978
|
+
ORDER BY timestamp DESC
|
|
1979
|
+
)
|
|
1980
|
+
WHERE ROWNUM <= :limit
|
|
1981
|
+
"""
|
|
1982
|
+
pattern = f"%{query.lower()}%"
|
|
1983
|
+
params = {"app_name": app_name, "user_id": user_id, "pattern": pattern, "limit": limit}
|
|
1984
|
+
async with self._config.provide_connection() as conn:
|
|
1985
|
+
cursor = conn.cursor()
|
|
1986
|
+
await cursor.execute(sql, params)
|
|
1987
|
+
rows = await cursor.fetchall()
|
|
1988
|
+
return await self._rows_to_records(rows)
|
|
1989
|
+
|
|
1990
|
+
async def delete_entries_by_session(self, session_id: str) -> int:
|
|
1991
|
+
sql = f"DELETE FROM {self._memory_table} WHERE session_id = :session_id"
|
|
1992
|
+
async with self._config.provide_connection() as conn:
|
|
1993
|
+
cursor = conn.cursor()
|
|
1994
|
+
await cursor.execute(sql, {"session_id": session_id})
|
|
1995
|
+
await conn.commit()
|
|
1996
|
+
return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0
|
|
1997
|
+
|
|
1998
|
+
async def delete_entries_older_than(self, days: int) -> int:
|
|
1999
|
+
sql = f"""
|
|
2000
|
+
DELETE FROM {self._memory_table}
|
|
2001
|
+
WHERE inserted_at < SYSTIMESTAMP - NUMTODSINTERVAL(:days, 'DAY')
|
|
2002
|
+
"""
|
|
2003
|
+
async with self._config.provide_connection() as conn:
|
|
2004
|
+
cursor = conn.cursor()
|
|
2005
|
+
await cursor.execute(sql, {"days": days})
|
|
2006
|
+
await conn.commit()
|
|
2007
|
+
return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0
|
|
2008
|
+
|
|
2009
|
+
async def _rows_to_records(self, rows: "list[Any]") -> "list[MemoryRecord]":
|
|
2010
|
+
records: list[MemoryRecord] = []
|
|
2011
|
+
for row in rows:
|
|
2012
|
+
content_json = await self._deserialize_json_field(row[7]) if row[7] is not None else {}
|
|
2013
|
+
metadata_json = await self._deserialize_json_field(row[9])
|
|
2014
|
+
content_text = row[8]
|
|
2015
|
+
if is_async_readable(content_text) or is_readable(content_text):
|
|
2016
|
+
content_text = await _read_lob_async(content_text)
|
|
2017
|
+
records.append({
|
|
2018
|
+
"id": row[0],
|
|
2019
|
+
"session_id": row[1],
|
|
2020
|
+
"app_name": row[2],
|
|
2021
|
+
"user_id": row[3],
|
|
2022
|
+
"event_id": row[4],
|
|
2023
|
+
"author": row[5],
|
|
2024
|
+
"timestamp": row[6],
|
|
2025
|
+
"content_json": cast("dict[str, Any]", content_json),
|
|
2026
|
+
"content_text": str(content_text),
|
|
2027
|
+
"metadata_json": metadata_json,
|
|
2028
|
+
"inserted_at": row[10],
|
|
2029
|
+
})
|
|
2030
|
+
return records
|
|
2031
|
+
|
|
2032
|
+
|
|
2033
|
+
class OracleSyncADKMemoryStore(BaseSyncADKMemoryStore["OracleSyncConfig"]):
    """Oracle ADK memory store using sync oracledb driver.

    Mirrors OracleAsyncADKMemoryStore with blocking calls: picks a JSON
    storage strategy from the detected Oracle version and caches both the
    version info and the chosen storage type after the first lookup.
    """

    __slots__ = ("_in_memory", "_json_storage_type", "_oracle_version_info")

    def __init__(self, config: "OracleSyncConfig") -> None:
        """Initialize the store from a sync Oracle config.

        Reads the optional ``extension_config["adk"]["in_memory"]`` flag,
        which toggles the INMEMORY clause on table creation.
        """
        super().__init__(config)
        # Lazily populated caches; see _detect_json_storage_type / _get_version_info.
        self._json_storage_type: JSONStorageType | None = None
        self._oracle_version_info: OracleVersionInfo | None = None
        adk_config = config.extension_config.get("adk", {})
        self._in_memory: bool = bool(adk_config.get("in_memory", False))

    def _detect_json_storage_type(self) -> "JSONStorageType":
        """Return the JSON storage type for this database, caching the result."""
        if self._json_storage_type is not None:
            return self._json_storage_type

        version_info = self._get_version_info()
        self._json_storage_type = storage_type_from_version(version_info)
        return self._json_storage_type

    def _get_version_info(self) -> "OracleVersionInfo | None":
        """Query and cache the Oracle server version.

        Returns None when the version cannot be detected; in that case a
        warning is logged and callers fall back to BLOB_JSON storage.
        """
        if self._oracle_version_info is not None:
            return self._oracle_version_info

        with self._config.provide_session() as driver:
            dictionary = OracledbSyncDataDictionary()
            self._oracle_version_info = dictionary.get_version(driver)

        if self._oracle_version_info is None:
            logger.warning("Could not detect Oracle version, defaulting to BLOB_JSON storage")

        return self._oracle_version_info

    def _serialize_json_field(self, value: Any) -> "str | bytes | None":
        """Serialize *value* for the active JSON column type.

        Native JSON columns take a str; BLOB-backed storage takes bytes.
        None passes through so NULL columns stay NULL.
        """
        if value is None:
            return None

        storage_type = self._detect_json_storage_type()
        if storage_type == JSONStorageType.JSON_NATIVE:
            return to_json(value)
        return to_json(value, as_bytes=True)

    def _deserialize_json_field(self, data: Any) -> "dict[str, Any] | None":
        """Decode a JSON column value, materializing LOB handles first."""
        if data is None:
            return None

        if is_readable(data):
            data = _read_lob_sync(data)

        return _extract_json_value(data)

    def _get_create_memory_table_sql(self) -> str:
        """Build the CREATE TABLE script for the detected storage type."""
        storage_type = self._detect_json_storage_type()
        return self._get_create_memory_table_sql_for_type(storage_type)

    def _get_create_memory_table_sql_for_type(self, storage_type: "JSONStorageType") -> str:
        """Build an idempotent PL/SQL script creating the memory table and its indexes.

        Each DDL statement is wrapped in a BEGIN/EXCEPTION block that
        swallows ORA-00955 ("name is already used by an existing object"),
        so re-running the script on an existing schema is a no-op.
        """
        if storage_type == JSONStorageType.JSON_NATIVE:
            json_columns = """
                content_json JSON,
                metadata_json JSON
            """
        elif storage_type == JSONStorageType.BLOB_JSON:
            json_columns = """
                content_json BLOB CHECK (content_json IS JSON),
                metadata_json BLOB CHECK (metadata_json IS JSON)
            """
        else:
            # Oldest servers: plain BLOBs with no JSON validation.
            json_columns = """
                content_json BLOB,
                metadata_json BLOB
            """

        owner_id_line = f",\n {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        inmemory_clause = " INMEMORY PRIORITY HIGH" if self._in_memory else ""

        fts_index = ""
        if self._use_fts:
            # Oracle Text CONTEXT index backing CONTAINS() in _search_entries_fts.
            fts_index = f"""
            BEGIN
                EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_fts
                    ON {self._memory_table}(content_text) INDEXTYPE IS CTXSYS.CONTEXT';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -955 THEN
                        RAISE;
                    END IF;
            END;
            """

        return f"""
        BEGIN
            EXECUTE IMMEDIATE 'CREATE TABLE {self._memory_table} (
                id VARCHAR2(128) PRIMARY KEY,
                session_id VARCHAR2(128) NOT NULL,
                app_name VARCHAR2(128) NOT NULL,
                user_id VARCHAR2(128) NOT NULL,
                event_id VARCHAR2(128) NOT NULL UNIQUE,
                author VARCHAR2(256){owner_id_line},
                timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
                {json_columns},
                content_text CLOB NOT NULL,
                inserted_at TIMESTAMP WITH TIME ZONE DEFAULT SYSTIMESTAMP NOT NULL
            ){inmemory_clause}';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_app_user_time
                ON {self._memory_table}(app_name, user_id, timestamp DESC)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;

        BEGIN
            EXECUTE IMMEDIATE 'CREATE INDEX idx_{self._memory_table}_session
                ON {self._memory_table}(session_id)';
        EXCEPTION
            WHEN OTHERS THEN
                IF SQLCODE != -955 THEN
                    RAISE;
                END IF;
        END;
        {fts_index}
        """

    def _get_drop_memory_table_sql(self) -> "list[str]":
        """Return idempotent drop statements: indexes first, then the table.

        ORA-01418 (index does not exist) and ORA-00942 (table or view does
        not exist) are swallowed so dropping an absent schema is a no-op.
        """
        return [
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._memory_table}_session';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP INDEX idx_{self._memory_table}_app_user_time';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -1418 THEN
                        RAISE;
                    END IF;
            END;
            """,
            f"""
            BEGIN
                EXECUTE IMMEDIATE 'DROP TABLE {self._memory_table}';
            EXCEPTION
                WHEN OTHERS THEN
                    IF SQLCODE != -942 THEN
                        RAISE;
                    END IF;
            END;
            """,
        ]

    def create_tables(self) -> None:
        """Create the memory table and indexes; no-op when the store is disabled."""
        if not self._enabled:
            return

        with self._config.provide_session() as driver:
            driver.execute_script(self._get_create_memory_table_sql())

    def _execute_insert_entry(self, cursor: Any, sql: str, params: "dict[str, Any]") -> bool:
        """Execute an insert and skip duplicate key errors.

        Returns True when the row was inserted, False when a duplicate key
        (ORACLE_DUPLICATE_KEY_ERROR) caused the insert to be skipped; any
        other database error propagates.
        """
        try:
            cursor.execute(sql, params)
        except oracledb.DatabaseError as exc:
            # oracledb wraps the error object as the first exception arg.
            error_obj = exc.args[0] if exc.args else None
            if error_obj and error_obj.code == ORACLE_DUPLICATE_KEY_ERROR:
                return False
            raise
        return True

    def insert_memory_entries(self, entries: "list[MemoryRecord]", owner_id: "object | None" = None) -> int:
        """Insert memory records one by one, skipping duplicates.

        Returns the number of rows actually inserted. All inserts share a
        single connection and are committed together at the end.

        Raises:
            RuntimeError: when the memory store is disabled.
        """
        if not self._enabled:
            msg = "Memory store is disabled"
            raise RuntimeError(msg)

        if not entries:
            return 0

        owner_column = f", {self._owner_id_column_name}" if self._owner_id_column_name else ""
        owner_param = ", :owner_id" if self._owner_id_column_name else ""
        sql = f"""
        INSERT INTO {self._memory_table} (
            id, session_id, app_name, user_id, event_id, author{owner_column},
            timestamp, content_json, content_text, metadata_json, inserted_at
        ) VALUES (
            :id, :session_id, :app_name, :user_id, :event_id, :author{owner_param},
            :timestamp, :content_json, :content_text, :metadata_json, :inserted_at
        )
        """

        inserted_count = 0
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            for entry in entries:
                # Serialization depends on the detected JSON storage type.
                content_json = self._serialize_json_field(entry["content_json"])
                metadata_json = self._serialize_json_field(entry["metadata_json"])
                params = {
                    "id": entry["id"],
                    "session_id": entry["session_id"],
                    "app_name": entry["app_name"],
                    "user_id": entry["user_id"],
                    "event_id": entry["event_id"],
                    "author": entry["author"],
                    "timestamp": entry["timestamp"],
                    "content_json": content_json,
                    "content_text": entry["content_text"],
                    "metadata_json": metadata_json,
                    "inserted_at": entry["inserted_at"],
                }
                if self._owner_id_column_name:
                    params["owner_id"] = str(owner_id) if owner_id is not None else None
                if self._execute_insert_entry(cursor, sql, params):
                    inserted_count += 1
            conn.commit()

        return inserted_count

    def search_entries(
        self, query: str, app_name: str, user_id: str, limit: "int | None" = None
    ) -> "list[MemoryRecord]":
        """Search memory entries for a user, via FTS when enabled.

        A missing memory table (ORACLE_TABLE_NOT_FOUND_ERROR) yields an
        empty result instead of an error.

        Raises:
            RuntimeError: when the memory store is disabled.
        """
        if not self._enabled:
            msg = "Memory store is disabled"
            raise RuntimeError(msg)

        effective_limit = limit if limit is not None else self._max_results

        try:
            if self._use_fts:
                return self._search_entries_fts(query, app_name, user_id, effective_limit)
            return self._search_entries_simple(query, app_name, user_id, effective_limit)
        except oracledb.DatabaseError as exc:
            error_obj = exc.args[0] if exc.args else None
            if error_obj and error_obj.code == ORACLE_TABLE_NOT_FOUND_ERROR:
                return []
            raise

    def _search_entries_fts(self, query: str, app_name: str, user_id: str, limit: int) -> "list[MemoryRecord]":
        """Full-text search via Oracle Text CONTAINS, ranked by SCORE.

        The ordered subquery is wrapped so ROWNUM limits rows AFTER the
        relevance ordering is applied.
        """
        sql = f"""
        SELECT id, session_id, app_name, user_id, event_id, author,
               timestamp, content_json, content_text, metadata_json, inserted_at
        FROM (
            SELECT id, session_id, app_name, user_id, event_id, author,
                   timestamp, content_json, content_text, metadata_json, inserted_at,
                   SCORE(1) AS score
            FROM {self._memory_table}
            WHERE app_name = :app_name
              AND user_id = :user_id
              AND CONTAINS(content_text, :query, 1) > 0
            ORDER BY score DESC, timestamp DESC
        )
        WHERE ROWNUM <= :limit
        """
        params = {"app_name": app_name, "user_id": user_id, "query": query, "limit": limit}
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, params)
            rows = cursor.fetchall()
            return self._rows_to_records(rows)

    def _search_entries_simple(self, query: str, app_name: str, user_id: str, limit: int) -> "list[MemoryRecord]":
        """Case-insensitive LIKE fallback search, newest entries first."""
        sql = f"""
        SELECT id, session_id, app_name, user_id, event_id, author,
               timestamp, content_json, content_text, metadata_json, inserted_at
        FROM (
            SELECT id, session_id, app_name, user_id, event_id, author,
                   timestamp, content_json, content_text, metadata_json, inserted_at
            FROM {self._memory_table}
            WHERE app_name = :app_name
              AND user_id = :user_id
              AND LOWER(content_text) LIKE :pattern
            ORDER BY timestamp DESC
        )
        WHERE ROWNUM <= :limit
        """
        pattern = f"%{query.lower()}%"
        params = {"app_name": app_name, "user_id": user_id, "pattern": pattern, "limit": limit}
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, params)
            rows = cursor.fetchall()
            return self._rows_to_records(rows)

    def delete_entries_by_session(self, session_id: str) -> int:
        """Delete all entries for a session; returns the deleted row count."""
        sql = f"DELETE FROM {self._memory_table} WHERE session_id = :session_id"
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, {"session_id": session_id})
            conn.commit()
            # rowcount can be None or negative on some drivers; clamp to 0.
            return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0

    def delete_entries_older_than(self, days: int) -> int:
        """Delete entries inserted more than *days* days ago; returns the count."""
        sql = f"""
        DELETE FROM {self._memory_table}
        WHERE inserted_at < SYSTIMESTAMP - NUMTODSINTERVAL(:days, 'DAY')
        """
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(sql, {"days": days})
            conn.commit()
            return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0

    def _rows_to_records(self, rows: "list[Any]") -> "list[MemoryRecord]":
        """Convert positional result rows into MemoryRecord dicts.

        Column order matches the SELECT lists above: id, session_id,
        app_name, user_id, event_id, author, timestamp, content_json,
        content_text, metadata_json, inserted_at. LOB columns are read
        into memory before decoding.
        """
        records: list[MemoryRecord] = []
        for row in rows:
            content_json = self._deserialize_json_field(row[7]) if row[7] is not None else {}
            metadata_json = self._deserialize_json_field(row[9])
            content_text = row[8]
            if is_readable(content_text):
                content_text = _read_lob_sync(content_text)
            records.append({
                "id": row[0],
                "session_id": row[1],
                "app_name": row[2],
                "user_id": row[3],
                "event_id": row[4],
                "author": row[5],
                "timestamp": row[6],
                "content_json": cast("dict[str, Any]", content_json),
                "content_text": str(content_text),
                "metadata_json": metadata_json,
                "inserted_at": row[10],
            })
        return records