sqlspec 0.36.0__cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ac8f31065839703b4e70__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/__init__.py +140 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +315 -0
- sqlspec/_typing.py +700 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_typing.py +82 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +1273 -0
- sqlspec/adapters/adbc/config.py +295 -0
- sqlspec/adapters/adbc/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/core.py +735 -0
- sqlspec/adapters/adbc/data_dictionary.py +334 -0
- sqlspec/adapters/adbc/driver.py +529 -0
- sqlspec/adapters/adbc/events/__init__.py +5 -0
- sqlspec/adapters/adbc/events/store.py +285 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +502 -0
- sqlspec/adapters/adbc/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/adbc/type_converter.py +140 -0
- sqlspec/adapters/aiosqlite/__init__.py +25 -0
- sqlspec/adapters/aiosqlite/_typing.py +82 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +818 -0
- sqlspec/adapters/aiosqlite/config.py +334 -0
- sqlspec/adapters/aiosqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/aiosqlite/core.py +315 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +208 -0
- sqlspec/adapters/aiosqlite/driver.py +313 -0
- sqlspec/adapters/aiosqlite/events/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/events/store.py +20 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +279 -0
- sqlspec/adapters/aiosqlite/pool.py +533 -0
- sqlspec/adapters/asyncmy/__init__.py +21 -0
- sqlspec/adapters/asyncmy/_typing.py +87 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +703 -0
- sqlspec/adapters/asyncmy/config.py +302 -0
- sqlspec/adapters/asyncmy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncmy/core.py +360 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +124 -0
- sqlspec/adapters/asyncmy/driver.py +383 -0
- sqlspec/adapters/asyncmy/events/__init__.py +5 -0
- sqlspec/adapters/asyncmy/events/store.py +104 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +19 -0
- sqlspec/adapters/asyncpg/_typing.py +88 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +748 -0
- sqlspec/adapters/asyncpg/config.py +569 -0
- sqlspec/adapters/asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/asyncpg/core.py +367 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +162 -0
- sqlspec/adapters/asyncpg/driver.py +487 -0
- sqlspec/adapters/asyncpg/events/__init__.py +6 -0
- sqlspec/adapters/asyncpg/events/backend.py +286 -0
- sqlspec/adapters/asyncpg/events/store.py +40 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +251 -0
- sqlspec/adapters/bigquery/__init__.py +14 -0
- sqlspec/adapters/bigquery/_typing.py +86 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +827 -0
- sqlspec/adapters/bigquery/config.py +353 -0
- sqlspec/adapters/bigquery/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/core.py +715 -0
- sqlspec/adapters/bigquery/data_dictionary.py +128 -0
- sqlspec/adapters/bigquery/driver.py +548 -0
- sqlspec/adapters/bigquery/events/__init__.py +5 -0
- sqlspec/adapters/bigquery/events/store.py +139 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +325 -0
- sqlspec/adapters/bigquery/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/bigquery/type_converter.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/__init__.py +24 -0
- sqlspec/adapters/cockroach_asyncpg/_typing.py +72 -0
- sqlspec/adapters/cockroach_asyncpg/adk/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/adk/store.py +410 -0
- sqlspec/adapters/cockroach_asyncpg/config.py +238 -0
- sqlspec/adapters/cockroach_asyncpg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_asyncpg/core.py +55 -0
- sqlspec/adapters/cockroach_asyncpg/data_dictionary.py +107 -0
- sqlspec/adapters/cockroach_asyncpg/driver.py +144 -0
- sqlspec/adapters/cockroach_asyncpg/events/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/events/store.py +20 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_asyncpg/litestar/store.py +142 -0
- sqlspec/adapters/cockroach_psycopg/__init__.py +38 -0
- sqlspec/adapters/cockroach_psycopg/_typing.py +129 -0
- sqlspec/adapters/cockroach_psycopg/adk/__init__.py +13 -0
- sqlspec/adapters/cockroach_psycopg/adk/store.py +868 -0
- sqlspec/adapters/cockroach_psycopg/config.py +484 -0
- sqlspec/adapters/cockroach_psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/cockroach_psycopg/core.py +63 -0
- sqlspec/adapters/cockroach_psycopg/data_dictionary.py +215 -0
- sqlspec/adapters/cockroach_psycopg/driver.py +284 -0
- sqlspec/adapters/cockroach_psycopg/events/__init__.py +6 -0
- sqlspec/adapters/cockroach_psycopg/events/store.py +34 -0
- sqlspec/adapters/cockroach_psycopg/litestar/__init__.py +3 -0
- sqlspec/adapters/cockroach_psycopg/litestar/store.py +325 -0
- sqlspec/adapters/duckdb/__init__.py +25 -0
- sqlspec/adapters/duckdb/_typing.py +81 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +850 -0
- sqlspec/adapters/duckdb/config.py +463 -0
- sqlspec/adapters/duckdb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/core.py +257 -0
- sqlspec/adapters/duckdb/data_dictionary.py +140 -0
- sqlspec/adapters/duckdb/driver.py +430 -0
- sqlspec/adapters/duckdb/events/__init__.py +5 -0
- sqlspec/adapters/duckdb/events/store.py +57 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +330 -0
- sqlspec/adapters/duckdb/pool.py +293 -0
- sqlspec/adapters/duckdb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/duckdb/type_converter.py +118 -0
- sqlspec/adapters/mock/__init__.py +72 -0
- sqlspec/adapters/mock/_typing.py +147 -0
- sqlspec/adapters/mock/config.py +483 -0
- sqlspec/adapters/mock/core.py +319 -0
- sqlspec/adapters/mock/data_dictionary.py +366 -0
- sqlspec/adapters/mock/driver.py +721 -0
- sqlspec/adapters/mysqlconnector/__init__.py +36 -0
- sqlspec/adapters/mysqlconnector/_typing.py +141 -0
- sqlspec/adapters/mysqlconnector/adk/__init__.py +15 -0
- sqlspec/adapters/mysqlconnector/adk/store.py +1060 -0
- sqlspec/adapters/mysqlconnector/config.py +394 -0
- sqlspec/adapters/mysqlconnector/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/mysqlconnector/core.py +303 -0
- sqlspec/adapters/mysqlconnector/data_dictionary.py +235 -0
- sqlspec/adapters/mysqlconnector/driver.py +483 -0
- sqlspec/adapters/mysqlconnector/events/__init__.py +8 -0
- sqlspec/adapters/mysqlconnector/events/store.py +98 -0
- sqlspec/adapters/mysqlconnector/litestar/__init__.py +5 -0
- sqlspec/adapters/mysqlconnector/litestar/store.py +426 -0
- sqlspec/adapters/oracledb/__init__.py +60 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +141 -0
- sqlspec/adapters/oracledb/_typing.py +182 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +166 -0
- sqlspec/adapters/oracledb/adk/__init__.py +10 -0
- sqlspec/adapters/oracledb/adk/store.py +2369 -0
- sqlspec/adapters/oracledb/config.py +550 -0
- sqlspec/adapters/oracledb/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/core.py +543 -0
- sqlspec/adapters/oracledb/data_dictionary.py +536 -0
- sqlspec/adapters/oracledb/driver.py +1229 -0
- sqlspec/adapters/oracledb/events/__init__.py +16 -0
- sqlspec/adapters/oracledb/events/backend.py +347 -0
- sqlspec/adapters/oracledb/events/store.py +420 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +781 -0
- sqlspec/adapters/oracledb/migrations.py +535 -0
- sqlspec/adapters/oracledb/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/oracledb/type_converter.py +211 -0
- sqlspec/adapters/psqlpy/__init__.py +17 -0
- sqlspec/adapters/psqlpy/_typing.py +79 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +766 -0
- sqlspec/adapters/psqlpy/config.py +304 -0
- sqlspec/adapters/psqlpy/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/core.py +480 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +126 -0
- sqlspec/adapters/psqlpy/driver.py +438 -0
- sqlspec/adapters/psqlpy/events/__init__.py +6 -0
- sqlspec/adapters/psqlpy/events/backend.py +310 -0
- sqlspec/adapters/psqlpy/events/store.py +20 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +270 -0
- sqlspec/adapters/psqlpy/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psqlpy/type_converter.py +113 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_typing.py +164 -0
- sqlspec/adapters/psycopg/adk/__init__.py +10 -0
- sqlspec/adapters/psycopg/adk/store.py +1387 -0
- sqlspec/adapters/psycopg/config.py +576 -0
- sqlspec/adapters/psycopg/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/core.py +450 -0
- sqlspec/adapters/psycopg/data_dictionary.py +289 -0
- sqlspec/adapters/psycopg/driver.py +975 -0
- sqlspec/adapters/psycopg/events/__init__.py +20 -0
- sqlspec/adapters/psycopg/events/backend.py +458 -0
- sqlspec/adapters/psycopg/events/store.py +42 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +552 -0
- sqlspec/adapters/psycopg/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/psycopg/type_converter.py +93 -0
- sqlspec/adapters/pymysql/__init__.py +21 -0
- sqlspec/adapters/pymysql/_typing.py +71 -0
- sqlspec/adapters/pymysql/adk/__init__.py +5 -0
- sqlspec/adapters/pymysql/adk/store.py +540 -0
- sqlspec/adapters/pymysql/config.py +195 -0
- sqlspec/adapters/pymysql/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/pymysql/core.py +299 -0
- sqlspec/adapters/pymysql/data_dictionary.py +122 -0
- sqlspec/adapters/pymysql/driver.py +259 -0
- sqlspec/adapters/pymysql/events/__init__.py +5 -0
- sqlspec/adapters/pymysql/events/store.py +50 -0
- sqlspec/adapters/pymysql/litestar/__init__.py +5 -0
- sqlspec/adapters/pymysql/litestar/store.py +232 -0
- sqlspec/adapters/pymysql/pool.py +137 -0
- sqlspec/adapters/spanner/__init__.py +40 -0
- sqlspec/adapters/spanner/_typing.py +86 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +732 -0
- sqlspec/adapters/spanner/config.py +352 -0
- sqlspec/adapters/spanner/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/core.py +188 -0
- sqlspec/adapters/spanner/data_dictionary.py +120 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +57 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +130 -0
- sqlspec/adapters/spanner/driver.py +373 -0
- sqlspec/adapters/spanner/events/__init__.py +5 -0
- sqlspec/adapters/spanner/events/store.py +187 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +291 -0
- sqlspec/adapters/spanner/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/spanner/type_converter.py +331 -0
- sqlspec/adapters/sqlite/__init__.py +19 -0
- sqlspec/adapters/sqlite/_typing.py +80 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +958 -0
- sqlspec/adapters/sqlite/config.py +280 -0
- sqlspec/adapters/sqlite/core.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/core.py +312 -0
- sqlspec/adapters/sqlite/data_dictionary.py +202 -0
- sqlspec/adapters/sqlite/driver.py +359 -0
- sqlspec/adapters/sqlite/events/__init__.py +5 -0
- sqlspec/adapters/sqlite/events/store.py +20 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +316 -0
- sqlspec/adapters/sqlite/pool.py +198 -0
- sqlspec/adapters/sqlite/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/adapters/sqlite/type_converter.py +114 -0
- sqlspec/base.py +747 -0
- sqlspec/builder/__init__.py +179 -0
- sqlspec/builder/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_base.py +1022 -0
- sqlspec/builder/_column.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_column.py +521 -0
- sqlspec/builder/_ddl.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_delete.py +95 -0
- sqlspec/builder/_dml.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_dml.py +365 -0
- sqlspec/builder/_explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_explain.py +579 -0
- sqlspec/builder/_expression_wrappers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_factory.py +1697 -0
- sqlspec/builder/_insert.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_insert.py +328 -0
- sqlspec/builder/_join.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_join.py +499 -0
- sqlspec/builder/_merge.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_merge.py +821 -0
- sqlspec/builder/_parsing_utils.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_select.py +1660 -0
- sqlspec/builder/_temporal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_temporal.py +139 -0
- sqlspec/builder/_update.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/builder/_update.py +173 -0
- sqlspec/builder/_vector_expressions.py +267 -0
- sqlspec/cli.py +911 -0
- sqlspec/config.py +1755 -0
- sqlspec/core/__init__.py +374 -0
- sqlspec/core/_correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/_correlation.py +176 -0
- sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/cache.py +1069 -0
- sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/compiler.py +954 -0
- sqlspec/core/explain.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/explain.py +275 -0
- sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/filters.py +952 -0
- sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/hashing.py +262 -0
- sqlspec/core/metrics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +71 -0
- sqlspec/core/parameters/_alignment.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_alignment.py +270 -0
- sqlspec/core/parameters/_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_converter.py +543 -0
- sqlspec/core/parameters/_processor.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_processor.py +505 -0
- sqlspec/core/parameters/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_registry.py +206 -0
- sqlspec/core/parameters/_transformers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_transformers.py +292 -0
- sqlspec/core/parameters/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_types.py +499 -0
- sqlspec/core/parameters/_validator.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/parameters/_validator.py +180 -0
- sqlspec/core/pipeline.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/pipeline.py +319 -0
- sqlspec/core/query_modifiers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/query_modifiers.py +437 -0
- sqlspec/core/result/__init__.py +23 -0
- sqlspec/core/result/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_base.py +1121 -0
- sqlspec/core/result/_io.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/result/_io.py +28 -0
- sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/splitter.py +966 -0
- sqlspec/core/stack.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/statement.py +1503 -0
- sqlspec/core/type_converter.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/core/type_converter.py +339 -0
- sqlspec/data_dictionary/__init__.py +22 -0
- sqlspec/data_dictionary/_loader.py +123 -0
- sqlspec/data_dictionary/_registry.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_registry.py +74 -0
- sqlspec/data_dictionary/_types.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/_types.py +121 -0
- sqlspec/data_dictionary/dialects/__init__.py +21 -0
- sqlspec/data_dictionary/dialects/bigquery.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/bigquery.py +49 -0
- sqlspec/data_dictionary/dialects/cockroachdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/cockroachdb.py +43 -0
- sqlspec/data_dictionary/dialects/duckdb.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/duckdb.py +47 -0
- sqlspec/data_dictionary/dialects/mysql.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/mysql.py +42 -0
- sqlspec/data_dictionary/dialects/oracle.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/oracle.py +34 -0
- sqlspec/data_dictionary/dialects/postgres.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/postgres.py +46 -0
- sqlspec/data_dictionary/dialects/spanner.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/spanner.py +37 -0
- sqlspec/data_dictionary/dialects/sqlite.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/data_dictionary/dialects/sqlite.py +42 -0
- sqlspec/data_dictionary/sql/.gitkeep +0 -0
- sqlspec/data_dictionary/sql/bigquery/columns.sql +23 -0
- sqlspec/data_dictionary/sql/bigquery/foreign_keys.sql +34 -0
- sqlspec/data_dictionary/sql/bigquery/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/bigquery/tables.sql +33 -0
- sqlspec/data_dictionary/sql/bigquery/version.sql +3 -0
- sqlspec/data_dictionary/sql/cockroachdb/columns.sql +34 -0
- sqlspec/data_dictionary/sql/cockroachdb/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/cockroachdb/indexes.sql +32 -0
- sqlspec/data_dictionary/sql/cockroachdb/tables.sql +44 -0
- sqlspec/data_dictionary/sql/cockroachdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/duckdb/columns.sql +23 -0
- sqlspec/data_dictionary/sql/duckdb/foreign_keys.sql +36 -0
- sqlspec/data_dictionary/sql/duckdb/indexes.sql +19 -0
- sqlspec/data_dictionary/sql/duckdb/tables.sql +38 -0
- sqlspec/data_dictionary/sql/duckdb/version.sql +3 -0
- sqlspec/data_dictionary/sql/mysql/columns.sql +23 -0
- sqlspec/data_dictionary/sql/mysql/foreign_keys.sql +28 -0
- sqlspec/data_dictionary/sql/mysql/indexes.sql +26 -0
- sqlspec/data_dictionary/sql/mysql/tables.sql +33 -0
- sqlspec/data_dictionary/sql/mysql/version.sql +3 -0
- sqlspec/data_dictionary/sql/oracle/columns.sql +23 -0
- sqlspec/data_dictionary/sql/oracle/foreign_keys.sql +48 -0
- sqlspec/data_dictionary/sql/oracle/indexes.sql +44 -0
- sqlspec/data_dictionary/sql/oracle/tables.sql +25 -0
- sqlspec/data_dictionary/sql/oracle/version.sql +20 -0
- sqlspec/data_dictionary/sql/postgres/columns.sql +34 -0
- sqlspec/data_dictionary/sql/postgres/foreign_keys.sql +40 -0
- sqlspec/data_dictionary/sql/postgres/indexes.sql +56 -0
- sqlspec/data_dictionary/sql/postgres/tables.sql +44 -0
- sqlspec/data_dictionary/sql/postgres/version.sql +3 -0
- sqlspec/data_dictionary/sql/spanner/columns.sql +23 -0
- sqlspec/data_dictionary/sql/spanner/foreign_keys.sql +70 -0
- sqlspec/data_dictionary/sql/spanner/indexes.sql +30 -0
- sqlspec/data_dictionary/sql/spanner/tables.sql +9 -0
- sqlspec/data_dictionary/sql/spanner/version.sql +3 -0
- sqlspec/data_dictionary/sql/sqlite/columns.sql +23 -0
- sqlspec/data_dictionary/sql/sqlite/foreign_keys.sql +22 -0
- sqlspec/data_dictionary/sql/sqlite/indexes.sql +7 -0
- sqlspec/data_dictionary/sql/sqlite/tables.sql +28 -0
- sqlspec/data_dictionary/sql/sqlite/version.sql +3 -0
- sqlspec/driver/__init__.py +32 -0
- sqlspec/driver/_async.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_async.py +1737 -0
- sqlspec/driver/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_common.py +1478 -0
- sqlspec/driver/_sql_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sql_helpers.py +148 -0
- sqlspec/driver/_storage_helpers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_storage_helpers.py +144 -0
- sqlspec/driver/_sync.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/driver/_sync.py +1710 -0
- sqlspec/exceptions.py +338 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +70 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/memory/__init__.py +69 -0
- sqlspec/extensions/adk/memory/_types.py +30 -0
- sqlspec/extensions/adk/memory/converters.py +149 -0
- sqlspec/extensions/adk/memory/service.py +217 -0
- sqlspec/extensions/adk/memory/store.py +569 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +246 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +225 -0
- sqlspec/extensions/adk/store.py +567 -0
- sqlspec/extensions/events/__init__.py +51 -0
- sqlspec/extensions/events/_channel.py +703 -0
- sqlspec/extensions/events/_hints.py +45 -0
- sqlspec/extensions/events/_models.py +23 -0
- sqlspec/extensions/events/_payload.py +69 -0
- sqlspec/extensions/events/_protocols.py +134 -0
- sqlspec/extensions/events/_queue.py +461 -0
- sqlspec/extensions/events/_store.py +209 -0
- sqlspec/extensions/events/migrations/0001_create_event_queue.py +59 -0
- sqlspec/extensions/events/migrations/__init__.py +3 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +351 -0
- sqlspec/extensions/fastapi/providers.py +607 -0
- sqlspec/extensions/flask/__init__.py +37 -0
- sqlspec/extensions/flask/_state.py +76 -0
- sqlspec/extensions/flask/_utils.py +71 -0
- sqlspec/extensions/flask/extension.py +519 -0
- sqlspec/extensions/litestar/__init__.py +28 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/channels.py +165 -0
- sqlspec/extensions/litestar/cli.py +102 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +671 -0
- sqlspec/extensions/litestar/providers.py +526 -0
- sqlspec/extensions/litestar/store.py +296 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +113 -0
- sqlspec/extensions/starlette/__init__.py +19 -0
- sqlspec/extensions/starlette/_state.py +30 -0
- sqlspec/extensions/starlette/_utils.py +96 -0
- sqlspec/extensions/starlette/extension.py +346 -0
- sqlspec/extensions/starlette/middleware.py +235 -0
- sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/loader.py +702 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +731 -0
- sqlspec/migrations/commands.py +1232 -0
- sqlspec/migrations/context.py +157 -0
- sqlspec/migrations/fix.py +204 -0
- sqlspec/migrations/loaders.py +443 -0
- sqlspec/migrations/runner.py +1172 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +611 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +207 -0
- sqlspec/migrations/version.py +446 -0
- sqlspec/observability/__init__.py +55 -0
- sqlspec/observability/_common.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_common.py +77 -0
- sqlspec/observability/_config.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_config.py +348 -0
- sqlspec/observability/_diagnostics.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_diagnostics.py +74 -0
- sqlspec/observability/_dispatcher.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_dispatcher.py +152 -0
- sqlspec/observability/_formatters/__init__.py +13 -0
- sqlspec/observability/_formatters/_aws.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_aws.py +102 -0
- sqlspec/observability/_formatters/_azure.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_azure.py +96 -0
- sqlspec/observability/_formatters/_base.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_base.py +57 -0
- sqlspec/observability/_formatters/_gcp.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_formatters/_gcp.py +131 -0
- sqlspec/observability/_formatting.py +58 -0
- sqlspec/observability/_observer.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_observer.py +357 -0
- sqlspec/observability/_runtime.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_runtime.py +420 -0
- sqlspec/observability/_sampling.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_sampling.py +188 -0
- sqlspec/observability/_spans.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/observability/_spans.py +161 -0
- sqlspec/protocols.py +916 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +48 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +253 -0
- sqlspec/storage/backends/fsspec.py +529 -0
- sqlspec/storage/backends/local.py +441 -0
- sqlspec/storage/backends/obstore.py +916 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +582 -0
- sqlspec/storage/registry.py +301 -0
- sqlspec/typing.py +395 -0
- sqlspec/utils/__init__.py +7 -0
- sqlspec/utils/arrow_helpers.py +318 -0
- sqlspec/utils/config_tools.py +332 -0
- sqlspec/utils/correlation.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/correlation.py +134 -0
- sqlspec/utils/deprecation.py +190 -0
- sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/fixtures.py +258 -0
- sqlspec/utils/logging.py +222 -0
- sqlspec/utils/module_loader.py +306 -0
- sqlspec/utils/portal.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/portal.py +375 -0
- sqlspec/utils/schema.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/schema.py +485 -0
- sqlspec/utils/serializers.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/serializers.py +408 -0
- sqlspec/utils/singleton.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/sync_tools.py +311 -0
- sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_converters.py +128 -0
- sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/type_guards.py +1360 -0
- sqlspec/utils/uuids.cpython-310-aarch64-linux-gnu.so +0 -0
- sqlspec/utils/uuids.py +225 -0
- sqlspec-0.36.0.dist-info/METADATA +205 -0
- sqlspec-0.36.0.dist-info/RECORD +531 -0
- sqlspec-0.36.0.dist-info/WHEEL +7 -0
- sqlspec-0.36.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.36.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1273 @@
|
|
|
1
|
+
"""ADBC ADK store for Google Agent Development Kit session/event storage."""
|
|
2
|
+
|
|
3
|
+
from datetime import datetime, timedelta, timezone
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Final
|
|
5
|
+
|
|
6
|
+
from sqlspec.extensions.adk import BaseSyncADKStore, EventRecord, SessionRecord
|
|
7
|
+
from sqlspec.extensions.adk.memory.store import BaseSyncADKMemoryStore
|
|
8
|
+
from sqlspec.utils.logging import get_logger
|
|
9
|
+
from sqlspec.utils.serializers import from_json, to_json
|
|
10
|
+
|
|
11
|
+
if TYPE_CHECKING:
|
|
12
|
+
from sqlspec.adapters.adbc.config import AdbcConfig
|
|
13
|
+
from sqlspec.extensions.adk import MemoryRecord
|
|
14
|
+
|
|
15
|
+
logger = get_logger("sqlspec.adapters.adbc.adk.store")
|
|
16
|
+
|
|
17
|
+
__all__ = ("AdbcADKMemoryStore", "AdbcADKStore")
|
|
18
|
+
|
|
19
|
+
DIALECT_POSTGRESQL: Final = "postgresql"
|
|
20
|
+
DIALECT_SQLITE: Final = "sqlite"
|
|
21
|
+
DIALECT_DUCKDB: Final = "duckdb"
|
|
22
|
+
DIALECT_SNOWFLAKE: Final = "snowflake"
|
|
23
|
+
DIALECT_GENERIC: Final = "generic"
|
|
24
|
+
|
|
25
|
+
ADBC_TABLE_NOT_FOUND_PATTERNS: Final = ("no such table", "table or view does not exist", "relation does not exist")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class AdbcADKStore(BaseSyncADKStore["AdbcConfig"]):
|
|
29
|
+
"""ADBC synchronous ADK store for Arrow Database Connectivity.
|
|
30
|
+
|
|
31
|
+
Implements session and event storage for Google Agent Development Kit
|
|
32
|
+
using ADBC. ADBC provides a vendor-neutral API with Arrow-native data
|
|
33
|
+
transfer across multiple databases (PostgreSQL, SQLite, DuckDB, etc.).
|
|
34
|
+
|
|
35
|
+
Provides:
|
|
36
|
+
- Session state management with JSON serialization (TEXT storage)
|
|
37
|
+
- Event history tracking with BLOB-serialized actions
|
|
38
|
+
- Timezone-aware timestamps
|
|
39
|
+
- Foreign key constraints with cascade delete
|
|
40
|
+
- Database-agnostic SQL (supports multiple backends)
|
|
41
|
+
|
|
42
|
+
Args:
|
|
43
|
+
config: AdbcConfig with extension_config["adk"] settings.
|
|
44
|
+
|
|
45
|
+
Example:
|
|
46
|
+
from sqlspec.adapters.adbc import AdbcConfig
|
|
47
|
+
from sqlspec.adapters.adbc.adk import AdbcADKStore
|
|
48
|
+
|
|
49
|
+
config = AdbcConfig(
|
|
50
|
+
connection_config={"driver_name": "sqlite", "uri": ":memory:"},
|
|
51
|
+
extension_config={
|
|
52
|
+
"adk": {
|
|
53
|
+
"session_table": "my_sessions",
|
|
54
|
+
"events_table": "my_events",
|
|
55
|
+
"owner_id_column": "tenant_id INTEGER REFERENCES tenants(id)"
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
)
|
|
59
|
+
store = AdbcADKStore(config)
|
|
60
|
+
store.ensure_tables()
|
|
61
|
+
|
|
62
|
+
Notes:
|
|
63
|
+
- TEXT for JSON storage (compatible across all ADBC backends)
|
|
64
|
+
- BLOB for pre-serialized actions from Google ADK
|
|
65
|
+
- TIMESTAMP for timezone-aware timestamps (driver-dependent precision)
|
|
66
|
+
- INTEGER for booleans (0/1/NULL)
|
|
67
|
+
- Parameter style varies by backend (?, $1, :name, etc.)
|
|
68
|
+
- Uses dialect-agnostic SQL for maximum compatibility
|
|
69
|
+
- State and JSON fields use to_json/from_json for serialization
|
|
70
|
+
- ADBC drivers handle parameter binding automatically
|
|
71
|
+
- Configuration is read from config.extension_config["adk"]
|
|
72
|
+
"""
|
|
73
|
+
|
|
74
|
+
__slots__ = ("_dialect",)
|
|
75
|
+
|
|
76
|
+
def __init__(self, config: "AdbcConfig") -> None:
    """Create the store and detect the backend dialect.

    Args:
        config: AdbcConfig instance (works with any ADBC driver).

    Notes:
        All tuning comes from ``config.extension_config["adk"]``:
        - session_table: Sessions table name (default: "adk_sessions")
        - events_table: Events table name (default: "adk_events")
        - owner_id_column: Optional owner FK column DDL (default: None)
    """
    super().__init__(config)
    # Resolve the SQL dialect once up front; DDL generation depends on it.
    self._dialect = self._detect_dialect()
|
|
90
|
+
|
|
91
|
+
    @property
    def dialect(self) -> str:
        """Return the detected database dialect (set once in __init__)."""
        return self._dialect
|
|
95
|
+
|
|
96
|
+
def _detect_dialect(self) -> str:
|
|
97
|
+
"""Detect ADBC driver dialect from connection config.
|
|
98
|
+
|
|
99
|
+
Returns:
|
|
100
|
+
Dialect identifier for DDL generation.
|
|
101
|
+
|
|
102
|
+
Notes:
|
|
103
|
+
Reads from config.connection_config driver_name.
|
|
104
|
+
Falls back to generic for unknown drivers.
|
|
105
|
+
"""
|
|
106
|
+
driver_name = self._config.connection_config.get("driver_name", "").lower()
|
|
107
|
+
|
|
108
|
+
if "postgres" in driver_name:
|
|
109
|
+
return DIALECT_POSTGRESQL
|
|
110
|
+
if "sqlite" in driver_name:
|
|
111
|
+
return DIALECT_SQLITE
|
|
112
|
+
if "duckdb" in driver_name:
|
|
113
|
+
return DIALECT_DUCKDB
|
|
114
|
+
if "snowflake" in driver_name:
|
|
115
|
+
return DIALECT_SNOWFLAKE
|
|
116
|
+
|
|
117
|
+
logger.warning(
|
|
118
|
+
"Unknown ADBC driver: %s. Using generic SQL dialect. "
|
|
119
|
+
"Consider using a direct adapter for better performance.",
|
|
120
|
+
driver_name,
|
|
121
|
+
)
|
|
122
|
+
return DIALECT_GENERIC
|
|
123
|
+
|
|
124
|
+
def _serialize_state(self, state: "dict[str, Any]") -> str:
|
|
125
|
+
"""Serialize state dictionary to JSON string.
|
|
126
|
+
|
|
127
|
+
Args:
|
|
128
|
+
state: State dictionary to serialize.
|
|
129
|
+
|
|
130
|
+
Returns:
|
|
131
|
+
JSON string.
|
|
132
|
+
"""
|
|
133
|
+
return to_json(state)
|
|
134
|
+
|
|
135
|
+
def _deserialize_state(self, data: Any) -> "dict[str, Any]":
|
|
136
|
+
"""Deserialize state data from JSON string.
|
|
137
|
+
|
|
138
|
+
Args:
|
|
139
|
+
data: JSON string from database.
|
|
140
|
+
|
|
141
|
+
Returns:
|
|
142
|
+
Deserialized state dictionary.
|
|
143
|
+
"""
|
|
144
|
+
if data is None:
|
|
145
|
+
return {}
|
|
146
|
+
return from_json(str(data)) # type: ignore[no-any-return]
|
|
147
|
+
|
|
148
|
+
def _serialize_json_field(self, value: Any) -> "str | None":
|
|
149
|
+
"""Serialize optional JSON field for event storage.
|
|
150
|
+
|
|
151
|
+
Args:
|
|
152
|
+
value: Value to serialize (dict or None).
|
|
153
|
+
|
|
154
|
+
Returns:
|
|
155
|
+
Serialized JSON string or None.
|
|
156
|
+
"""
|
|
157
|
+
if value is None:
|
|
158
|
+
return None
|
|
159
|
+
return to_json(value)
|
|
160
|
+
|
|
161
|
+
def _deserialize_json_field(self, data: Any) -> "dict[str, Any] | None":
|
|
162
|
+
"""Deserialize optional JSON field from database.
|
|
163
|
+
|
|
164
|
+
Args:
|
|
165
|
+
data: JSON string from database or None.
|
|
166
|
+
|
|
167
|
+
Returns:
|
|
168
|
+
Deserialized dictionary or None.
|
|
169
|
+
"""
|
|
170
|
+
if data is None:
|
|
171
|
+
return None
|
|
172
|
+
return from_json(str(data)) # type: ignore[no-any-return]
|
|
173
|
+
|
|
174
|
+
def _get_create_sessions_table_sql(self) -> str:
|
|
175
|
+
"""Get CREATE TABLE SQL for sessions with dialect dispatch.
|
|
176
|
+
|
|
177
|
+
Returns:
|
|
178
|
+
SQL statement to create adk_sessions table.
|
|
179
|
+
"""
|
|
180
|
+
if self._dialect == DIALECT_POSTGRESQL:
|
|
181
|
+
return self._get_sessions_ddl_postgresql()
|
|
182
|
+
if self._dialect == DIALECT_SQLITE:
|
|
183
|
+
return self._get_sessions_ddl_sqlite()
|
|
184
|
+
if self._dialect == DIALECT_DUCKDB:
|
|
185
|
+
return self._get_sessions_ddl_duckdb()
|
|
186
|
+
if self._dialect == DIALECT_SNOWFLAKE:
|
|
187
|
+
return self._get_sessions_ddl_snowflake()
|
|
188
|
+
return self._get_sessions_ddl_generic()
|
|
189
|
+
|
|
190
|
+
    def _get_sessions_ddl_postgresql(self) -> str:
        """PostgreSQL DDL with JSONB and TIMESTAMPTZ.

        Returns:
            SQL to create sessions table optimized for PostgreSQL.
        """
        # Optional owner/tenant FK column is appended right after user_id.
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._session_table} (
            id VARCHAR(128) PRIMARY KEY,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL{owner_id_ddl},
            state JSONB NOT NULL DEFAULT '{{}}'::jsonb,
            create_time TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
            update_time TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
|
|
207
|
+
|
|
208
|
+
    def _get_sessions_ddl_sqlite(self) -> str:
        """SQLite DDL with TEXT and REAL timestamps.

        Returns:
            SQL to create sessions table optimized for SQLite.
        """
        # Optional owner/tenant FK column is appended right after user_id.
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        # Timestamps are REAL (epoch seconds) with no defaults: callers supply them.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._session_table} (
            id TEXT PRIMARY KEY,
            app_name TEXT NOT NULL,
            user_id TEXT NOT NULL{owner_id_ddl},
            state TEXT NOT NULL DEFAULT '{{}}',
            create_time REAL NOT NULL,
            update_time REAL NOT NULL
        )
        """
|
|
225
|
+
|
|
226
|
+
    def _get_sessions_ddl_duckdb(self) -> str:
        """DuckDB DDL with native JSON type.

        Returns:
            SQL to create sessions table optimized for DuckDB.
        """
        # Optional owner/tenant FK column is appended right after user_id.
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._session_table} (
            id VARCHAR(128) PRIMARY KEY,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL{owner_id_ddl},
            state JSON NOT NULL,
            create_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
            update_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
|
|
243
|
+
|
|
244
|
+
    def _get_sessions_ddl_snowflake(self) -> str:
        """Snowflake DDL with VARIANT type.

        Returns:
            SQL to create sessions table optimized for Snowflake.
        """
        # Optional owner/tenant FK column is appended right after user_id.
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        # Snowflake uses CURRENT_TIMESTAMP() (with parentheses) and TIMESTAMP_TZ.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._session_table} (
            id VARCHAR PRIMARY KEY,
            app_name VARCHAR NOT NULL,
            user_id VARCHAR NOT NULL{owner_id_ddl},
            state VARIANT NOT NULL,
            create_time TIMESTAMP_TZ NOT NULL DEFAULT CURRENT_TIMESTAMP(),
            update_time TIMESTAMP_TZ NOT NULL DEFAULT CURRENT_TIMESTAMP()
        )
        """
|
|
261
|
+
|
|
262
|
+
    def _get_sessions_ddl_generic(self) -> str:
        """Generic SQL-92 compatible DDL fallback.

        Returns:
            SQL to create sessions table using generic types.
        """
        # Optional owner/tenant FK column is appended right after user_id.
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._session_table} (
            id VARCHAR(128) PRIMARY KEY,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL{owner_id_ddl},
            state TEXT NOT NULL DEFAULT '{{}}',
            create_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
            update_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
|
|
279
|
+
|
|
280
|
+
def _get_create_events_table_sql(self) -> str:
|
|
281
|
+
"""Get CREATE TABLE SQL for events with dialect dispatch.
|
|
282
|
+
|
|
283
|
+
Returns:
|
|
284
|
+
SQL statement to create adk_events table.
|
|
285
|
+
"""
|
|
286
|
+
if self._dialect == DIALECT_POSTGRESQL:
|
|
287
|
+
return self._get_events_ddl_postgresql()
|
|
288
|
+
if self._dialect == DIALECT_SQLITE:
|
|
289
|
+
return self._get_events_ddl_sqlite()
|
|
290
|
+
if self._dialect == DIALECT_DUCKDB:
|
|
291
|
+
return self._get_events_ddl_duckdb()
|
|
292
|
+
if self._dialect == DIALECT_SNOWFLAKE:
|
|
293
|
+
return self._get_events_ddl_snowflake()
|
|
294
|
+
return self._get_events_ddl_generic()
|
|
295
|
+
|
|
296
|
+
    def _get_events_ddl_postgresql(self) -> str:
        """PostgreSQL DDL for events table.

        Returns:
            SQL to create events table optimized for PostgreSQL.
        """
        # BYTEA for pre-serialized actions; JSONB for structured fields;
        # ON DELETE CASCADE so deleting a session removes its events.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._events_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            invocation_id VARCHAR(256),
            author VARCHAR(256),
            actions BYTEA,
            long_running_tool_ids_json TEXT,
            branch VARCHAR(256),
            timestamp TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
            content JSONB,
            grounding_metadata JSONB,
            custom_metadata JSONB,
            partial BOOLEAN,
            turn_complete BOOLEAN,
            interrupted BOOLEAN,
            error_code VARCHAR(256),
            error_message VARCHAR(1024),
            FOREIGN KEY (session_id) REFERENCES {self._session_table}(id) ON DELETE CASCADE
        )
        """
|
|
325
|
+
|
|
326
|
+
    def _get_events_ddl_sqlite(self) -> str:
        """SQLite DDL for events table.

        Returns:
            SQL to create events table optimized for SQLite.
        """
        # BLOB for pre-serialized actions; INTEGER (0/1/NULL) for booleans;
        # REAL epoch-seconds timestamp. Cascade delete requires the
        # per-connection foreign_keys PRAGMA (see _enable_foreign_keys).
        return f"""
        CREATE TABLE IF NOT EXISTS {self._events_table} (
            id TEXT PRIMARY KEY,
            session_id TEXT NOT NULL,
            app_name TEXT NOT NULL,
            user_id TEXT NOT NULL,
            invocation_id TEXT,
            author TEXT,
            actions BLOB,
            long_running_tool_ids_json TEXT,
            branch TEXT,
            timestamp REAL NOT NULL,
            content TEXT,
            grounding_metadata TEXT,
            custom_metadata TEXT,
            partial INTEGER,
            turn_complete INTEGER,
            interrupted INTEGER,
            error_code TEXT,
            error_message TEXT,
            FOREIGN KEY (session_id) REFERENCES {self._session_table}(id) ON DELETE CASCADE
        )
        """
|
|
355
|
+
|
|
356
|
+
    def _get_events_ddl_duckdb(self) -> str:
        """DuckDB DDL for events table.

        Returns:
            SQL to create events table optimized for DuckDB.
        """
        # Native JSON columns for structured fields; BLOB for actions.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._events_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            invocation_id VARCHAR(256),
            author VARCHAR(256),
            actions BLOB,
            long_running_tool_ids_json VARCHAR,
            branch VARCHAR(256),
            timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
            content JSON,
            grounding_metadata JSON,
            custom_metadata JSON,
            partial BOOLEAN,
            turn_complete BOOLEAN,
            interrupted BOOLEAN,
            error_code VARCHAR(256),
            error_message VARCHAR(1024),
            FOREIGN KEY (session_id) REFERENCES {self._session_table}(id) ON DELETE CASCADE
        )
        """
|
|
385
|
+
|
|
386
|
+
    def _get_events_ddl_snowflake(self) -> str:
        """Snowflake DDL for events table.

        Returns:
            SQL to create events table optimized for Snowflake.
        """
        # VARIANT for structured fields, BINARY for actions. NOTE: unlike the
        # other dialects, the FK here has no ON DELETE CASCADE clause.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._events_table} (
            id VARCHAR PRIMARY KEY,
            session_id VARCHAR NOT NULL,
            app_name VARCHAR NOT NULL,
            user_id VARCHAR NOT NULL,
            invocation_id VARCHAR,
            author VARCHAR,
            actions BINARY,
            long_running_tool_ids_json VARCHAR,
            branch VARCHAR,
            timestamp TIMESTAMP_TZ NOT NULL DEFAULT CURRENT_TIMESTAMP(),
            content VARIANT,
            grounding_metadata VARIANT,
            custom_metadata VARIANT,
            partial BOOLEAN,
            turn_complete BOOLEAN,
            interrupted BOOLEAN,
            error_code VARCHAR,
            error_message VARCHAR,
            FOREIGN KEY (session_id) REFERENCES {self._session_table}(id)
        )
        """
|
|
415
|
+
|
|
416
|
+
    def _get_events_ddl_generic(self) -> str:
        """Generic SQL-92 compatible DDL for events table.

        Returns:
            SQL to create events table using generic types.
        """
        # TEXT for JSON fields, BLOB for actions, INTEGER for booleans —
        # lowest-common-denominator types for unknown backends.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._events_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            invocation_id VARCHAR(256),
            author VARCHAR(256),
            actions BLOB,
            long_running_tool_ids_json TEXT,
            branch VARCHAR(256),
            timestamp TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
            content TEXT,
            grounding_metadata TEXT,
            custom_metadata TEXT,
            partial INTEGER,
            turn_complete INTEGER,
            interrupted INTEGER,
            error_code VARCHAR(256),
            error_message VARCHAR(1024),
            FOREIGN KEY (session_id) REFERENCES {self._session_table}(id) ON DELETE CASCADE
        )
        """
|
|
445
|
+
|
|
446
|
+
def _get_drop_tables_sql(self) -> "list[str]":
|
|
447
|
+
"""Get DROP TABLE SQL statements.
|
|
448
|
+
|
|
449
|
+
Returns:
|
|
450
|
+
List of SQL statements to drop tables and indexes.
|
|
451
|
+
|
|
452
|
+
Notes:
|
|
453
|
+
Order matters: drop events table (child) before sessions (parent).
|
|
454
|
+
Most databases automatically drop indexes when dropping tables.
|
|
455
|
+
"""
|
|
456
|
+
return [f"DROP TABLE IF EXISTS {self._events_table}", f"DROP TABLE IF EXISTS {self._session_table}"]
|
|
457
|
+
|
|
458
|
+
    def create_tables(self) -> None:
        """Create both sessions and events tables if they don't exist."""
        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            try:
                # SQLite needs per-connection PRAGMA foreign_keys; no-op elsewhere.
                self._enable_foreign_keys(cursor, conn)

                cursor.execute(self._get_create_sessions_table_sql())
                conn.commit()

                # Composite index backing list_sessions(app_name, user_id).
                sessions_idx_app_user = (
                    f"CREATE INDEX IF NOT EXISTS idx_{self._session_table}_app_user "
                    f"ON {self._session_table}(app_name, user_id)"
                )
                cursor.execute(sessions_idx_app_user)
                conn.commit()

                # Backs ORDER BY update_time DESC in list_sessions.
                sessions_idx_update = (
                    f"CREATE INDEX IF NOT EXISTS idx_{self._session_table}_update_time "
                    f"ON {self._session_table}(update_time DESC)"
                )
                cursor.execute(sessions_idx_update)
                conn.commit()

                # Events table references sessions, so it is created second.
                cursor.execute(self._get_create_events_table_sql())
                conn.commit()

                # Backs ORDER BY timestamp ASC in list_events.
                events_idx = (
                    f"CREATE INDEX IF NOT EXISTS idx_{self._events_table}_session "
                    f"ON {self._events_table}(session_id, timestamp ASC)"
                )
                cursor.execute(events_idx)
                conn.commit()
            finally:
                cursor.close()  # type: ignore[no-untyped-call]
|
|
493
|
+
|
|
494
|
+
    def _enable_foreign_keys(self, cursor: Any, conn: Any) -> None:
        """Enable foreign key constraints for SQLite.

        Args:
            cursor: Database cursor.
            conn: Database connection.

        Notes:
            SQLite requires PRAGMA foreign_keys = ON to be set per connection.
            This is a no-op for other databases.
        """
        try:
            cursor.execute("PRAGMA foreign_keys = ON")
            conn.commit()
        except Exception:
            # Deliberate best-effort: non-SQLite backends will reject the
            # PRAGMA, which is fine — just note it and continue.
            logger.debug("Foreign key enforcement not supported or already enabled")
|
|
510
|
+
|
|
511
|
+
    def create_session(
        self, session_id: str, app_name: str, user_id: str, state: "dict[str, Any]", owner_id: "Any | None" = None
    ) -> SessionRecord:
        """Create a new session.

        Args:
            session_id: Unique session identifier.
            app_name: Application name.
            user_id: User identifier.
            state: Initial session state.
            owner_id: Optional owner ID value for owner_id_column (can be None for nullable columns).

        Returns:
            Created session record (re-read from the database so that
            server-side timestamps are populated).
        """
        state_json = self._serialize_state(state)

        params: tuple[Any, ...]
        # The INSERT shape depends on whether an owner FK column was configured.
        if self._owner_id_column_name:
            sql = f"""
            INSERT INTO {self._session_table}
            (id, app_name, user_id, {self._owner_id_column_name}, state, create_time, update_time)
            VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """
            params = (session_id, app_name, user_id, owner_id, state_json)
        else:
            sql = f"""
            INSERT INTO {self._session_table} (id, app_name, user_id, state, create_time, update_time)
            VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)
            """
            params = (session_id, app_name, user_id, state_json)

        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            try:
                cursor.execute(sql, params)
                conn.commit()
            finally:
                cursor.close()  # type: ignore[no-untyped-call]

        # Re-read so create_time/update_time reflect the database's values.
        return self.get_session(session_id)  # type: ignore[return-value]
|
|
552
|
+
|
|
553
|
+
    def get_session(self, session_id: str) -> "SessionRecord | None":
        """Get session by ID.

        Args:
            session_id: Session identifier.

        Returns:
            Session record or None if not found.

        Notes:
            State is deserialized from JSON string. A missing table is
            treated the same as a missing row (returns None).
        """
        sql = f"""
        SELECT id, app_name, user_id, state, create_time, update_time
        FROM {self._session_table}
        WHERE id = ?
        """

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                try:
                    cursor.execute(sql, (session_id,))
                    row = cursor.fetchone()

                    if row is None:
                        return None

                    return SessionRecord(
                        id=row[0],
                        app_name=row[1],
                        user_id=row[2],
                        state=self._deserialize_state(row[3]),
                        create_time=row[4],
                        update_time=row[5],
                    )
                finally:
                    cursor.close()  # type: ignore[no-untyped-call]
        except Exception as e:
            # ADBC drivers raise backend-specific errors for a missing table;
            # match known message fragments and treat that case as "no data".
            error_msg = str(e).lower()
            if any(pattern in error_msg for pattern in ADBC_TABLE_NOT_FOUND_PATTERNS):
                return None
            raise
|
|
596
|
+
|
|
597
|
+
    def update_session_state(self, session_id: str, state: "dict[str, Any]") -> None:
        """Update session state.

        Args:
            session_id: Session identifier.
            state: New state dictionary (replaces existing state).

        Notes:
            This replaces the entire state dictionary — it is not a merge.
            Updates update_time to current timestamp.
        """
        state_json = self._serialize_state(state)
        sql = f"""
        UPDATE {self._session_table}
        SET state = ?, update_time = CURRENT_TIMESTAMP
        WHERE id = ?
        """

        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            try:
                cursor.execute(sql, (state_json, session_id))
                conn.commit()
            finally:
                cursor.close()  # type: ignore[no-untyped-call]
|
|
622
|
+
|
|
623
|
+
    def delete_session(self, session_id: str) -> None:
        """Delete session and all associated events (cascade).

        Args:
            session_id: Session identifier.

        Notes:
            Foreign key constraint ensures events are cascade-deleted;
            for SQLite this requires the per-connection PRAGMA enabled below.
        """
        sql = f"DELETE FROM {self._session_table} WHERE id = ?"

        with self._config.provide_connection() as conn:
            cursor = conn.cursor()
            try:
                # Without this, SQLite would delete the session but orphan its events.
                self._enable_foreign_keys(cursor, conn)
                cursor.execute(sql, (session_id,))
                conn.commit()
            finally:
                cursor.close()  # type: ignore[no-untyped-call]
|
|
642
|
+
|
|
643
|
+
    def list_sessions(self, app_name: str, user_id: str | None = None) -> "list[SessionRecord]":
        """List sessions for an app, optionally filtered by user.

        Args:
            app_name: Application name.
            user_id: User identifier. If None, lists all sessions for the app.

        Returns:
            List of session records ordered by update_time DESC.

        Notes:
            Uses composite index on (app_name, user_id) when user_id is provided.
            A missing table is treated the same as no rows (returns []).
        """
        if user_id is None:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = ?
            ORDER BY update_time DESC
            """
            params: tuple[str, ...] = (app_name,)
        else:
            sql = f"""
            SELECT id, app_name, user_id, state, create_time, update_time
            FROM {self._session_table}
            WHERE app_name = ? AND user_id = ?
            ORDER BY update_time DESC
            """
            params = (app_name, user_id)

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                try:
                    cursor.execute(sql, params)
                    rows = cursor.fetchall()

                    return [
                        SessionRecord(
                            id=row[0],
                            app_name=row[1],
                            user_id=row[2],
                            state=self._deserialize_state(row[3]),
                            create_time=row[4],
                            update_time=row[5],
                        )
                        for row in rows
                    ]
                finally:
                    cursor.close()  # type: ignore[no-untyped-call]
        except Exception as e:
            # Missing table => empty result rather than an error.
            error_msg = str(e).lower()
            if any(pattern in error_msg for pattern in ADBC_TABLE_NOT_FOUND_PATTERNS):
                return []
            raise
|
|
698
|
+
|
|
699
|
+
def create_event(
|
|
700
|
+
self,
|
|
701
|
+
event_id: str,
|
|
702
|
+
session_id: str,
|
|
703
|
+
app_name: str,
|
|
704
|
+
user_id: str,
|
|
705
|
+
author: "str | None" = None,
|
|
706
|
+
actions: "bytes | None" = None,
|
|
707
|
+
content: "dict[str, Any] | None" = None,
|
|
708
|
+
**kwargs: Any,
|
|
709
|
+
) -> "EventRecord":
|
|
710
|
+
"""Create a new event.
|
|
711
|
+
|
|
712
|
+
Args:
|
|
713
|
+
event_id: Unique event identifier.
|
|
714
|
+
session_id: Session identifier.
|
|
715
|
+
app_name: Application name.
|
|
716
|
+
user_id: User identifier.
|
|
717
|
+
author: Event author (user/assistant/system).
|
|
718
|
+
actions: Pickled actions object.
|
|
719
|
+
content: Event content (JSON).
|
|
720
|
+
**kwargs: Additional optional fields.
|
|
721
|
+
|
|
722
|
+
Returns:
|
|
723
|
+
Created event record.
|
|
724
|
+
|
|
725
|
+
Notes:
|
|
726
|
+
Uses CURRENT_TIMESTAMP for timestamp if not provided.
|
|
727
|
+
JSON fields are serialized to JSON strings.
|
|
728
|
+
Boolean fields are converted to INTEGER (0/1).
|
|
729
|
+
"""
|
|
730
|
+
content_json = self._serialize_json_field(content)
|
|
731
|
+
grounding_metadata_json = self._serialize_json_field(kwargs.get("grounding_metadata"))
|
|
732
|
+
custom_metadata_json = self._serialize_json_field(kwargs.get("custom_metadata"))
|
|
733
|
+
|
|
734
|
+
partial_int = self._to_int_bool(kwargs.get("partial"))
|
|
735
|
+
turn_complete_int = self._to_int_bool(kwargs.get("turn_complete"))
|
|
736
|
+
interrupted_int = self._to_int_bool(kwargs.get("interrupted"))
|
|
737
|
+
|
|
738
|
+
sql = f"""
|
|
739
|
+
INSERT INTO {self._events_table} (
|
|
740
|
+
id, session_id, app_name, user_id, invocation_id, author, actions,
|
|
741
|
+
long_running_tool_ids_json, branch, timestamp, content,
|
|
742
|
+
grounding_metadata, custom_metadata, partial, turn_complete,
|
|
743
|
+
interrupted, error_code, error_message
|
|
744
|
+
) VALUES (
|
|
745
|
+
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
|
746
|
+
)
|
|
747
|
+
"""
|
|
748
|
+
|
|
749
|
+
timestamp = kwargs.get("timestamp")
|
|
750
|
+
if timestamp is None:
|
|
751
|
+
timestamp = datetime.now(timezone.utc)
|
|
752
|
+
|
|
753
|
+
with self._config.provide_connection() as conn:
|
|
754
|
+
cursor = conn.cursor()
|
|
755
|
+
try:
|
|
756
|
+
cursor.execute(
|
|
757
|
+
sql,
|
|
758
|
+
(
|
|
759
|
+
event_id,
|
|
760
|
+
session_id,
|
|
761
|
+
app_name,
|
|
762
|
+
user_id,
|
|
763
|
+
kwargs.get("invocation_id"),
|
|
764
|
+
author,
|
|
765
|
+
actions,
|
|
766
|
+
kwargs.get("long_running_tool_ids_json"),
|
|
767
|
+
kwargs.get("branch"),
|
|
768
|
+
timestamp,
|
|
769
|
+
content_json,
|
|
770
|
+
grounding_metadata_json,
|
|
771
|
+
custom_metadata_json,
|
|
772
|
+
partial_int,
|
|
773
|
+
turn_complete_int,
|
|
774
|
+
interrupted_int,
|
|
775
|
+
kwargs.get("error_code"),
|
|
776
|
+
kwargs.get("error_message"),
|
|
777
|
+
),
|
|
778
|
+
)
|
|
779
|
+
conn.commit()
|
|
780
|
+
finally:
|
|
781
|
+
cursor.close() # type: ignore[no-untyped-call]
|
|
782
|
+
|
|
783
|
+
events = self.list_events(session_id)
|
|
784
|
+
for event in events:
|
|
785
|
+
if event["id"] == event_id:
|
|
786
|
+
return event
|
|
787
|
+
|
|
788
|
+
msg = f"Failed to retrieve created event {event_id}"
|
|
789
|
+
raise RuntimeError(msg)
|
|
790
|
+
|
|
791
|
+
    def list_events(self, session_id: str) -> "list[EventRecord]":
        """List events for a session ordered by timestamp.

        Args:
            session_id: Session identifier.

        Returns:
            List of event records ordered by timestamp ASC.

        Notes:
            Uses index on (session_id, timestamp ASC).
            JSON fields deserialized from JSON strings.
            Converts INTEGER booleans to Python bool.
            A missing table is treated the same as no rows (returns []).
        """
        sql = f"""
        SELECT id, session_id, app_name, user_id, invocation_id, author, actions,
               long_running_tool_ids_json, branch, timestamp, content,
               grounding_metadata, custom_metadata, partial, turn_complete,
               interrupted, error_code, error_message
        FROM {self._events_table}
        WHERE session_id = ?
        ORDER BY timestamp ASC
        """

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                try:
                    cursor.execute(sql, (session_id,))
                    rows = cursor.fetchall()

                    return [
                        EventRecord(
                            id=row[0],
                            session_id=row[1],
                            app_name=row[2],
                            user_id=row[3],
                            invocation_id=row[4],
                            author=row[5],
                            # NULL actions are normalized to empty bytes.
                            actions=bytes(row[6]) if row[6] is not None else b"",
                            long_running_tool_ids_json=row[7],
                            branch=row[8],
                            timestamp=row[9],
                            content=self._deserialize_json_field(row[10]),
                            grounding_metadata=self._deserialize_json_field(row[11]),
                            custom_metadata=self._deserialize_json_field(row[12]),
                            partial=self._from_int_bool(row[13]),
                            turn_complete=self._from_int_bool(row[14]),
                            interrupted=self._from_int_bool(row[15]),
                            error_code=row[16],
                            error_message=row[17],
                        )
                        for row in rows
                    ]
                finally:
                    cursor.close()  # type: ignore[no-untyped-call]
        except Exception as e:
            # Missing table => empty result rather than an error.
            error_msg = str(e).lower()
            if any(pattern in error_msg for pattern in ADBC_TABLE_NOT_FOUND_PATTERNS):
                return []
            raise
|
|
852
|
+
|
|
853
|
+
@staticmethod
|
|
854
|
+
def _to_int_bool(value: "bool | None") -> "int | None":
|
|
855
|
+
"""Convert Python boolean to INTEGER (0/1).
|
|
856
|
+
|
|
857
|
+
Args:
|
|
858
|
+
value: Python boolean value or None.
|
|
859
|
+
|
|
860
|
+
Returns:
|
|
861
|
+
1 for True, 0 for False, None for None.
|
|
862
|
+
"""
|
|
863
|
+
if value is None:
|
|
864
|
+
return None
|
|
865
|
+
return 1 if value else 0
|
|
866
|
+
|
|
867
|
+
@staticmethod
|
|
868
|
+
def _from_int_bool(value: "int | None") -> "bool | None":
|
|
869
|
+
"""Convert INTEGER to Python boolean.
|
|
870
|
+
|
|
871
|
+
Args:
|
|
872
|
+
value: INTEGER value (0, 1, or None).
|
|
873
|
+
|
|
874
|
+
Returns:
|
|
875
|
+
Python boolean or None.
|
|
876
|
+
"""
|
|
877
|
+
if value is None:
|
|
878
|
+
return None
|
|
879
|
+
return bool(value)
|
|
880
|
+
|
|
881
|
+
|
|
882
|
+
class AdbcADKMemoryStore(BaseSyncADKMemoryStore["AdbcConfig"]):
|
|
883
|
+
"""ADBC synchronous ADK memory store for Arrow Database Connectivity."""
|
|
884
|
+
|
|
885
|
+
__slots__ = ("_dialect",)
|
|
886
|
+
|
|
887
|
+
    def __init__(self, config: "AdbcConfig") -> None:
        """Initialize the ADBC ADK memory store and detect the SQL dialect.

        Args:
            config: AdbcConfig instance (any ADBC driver).
        """
        super().__init__(config)
        # Dialect drives the per-backend memory-table DDL generation below.
        self._dialect = self._detect_dialect()
|
|
890
|
+
|
|
891
|
+
    @property
    def dialect(self) -> str:
        """Return the detected database dialect (set once in __init__)."""
        return self._dialect
|
|
894
|
+
|
|
895
|
+
def _detect_dialect(self) -> str:
|
|
896
|
+
driver_name = self._config.connection_config.get("driver_name", "").lower()
|
|
897
|
+
if "postgres" in driver_name:
|
|
898
|
+
return DIALECT_POSTGRESQL
|
|
899
|
+
if "sqlite" in driver_name:
|
|
900
|
+
return DIALECT_SQLITE
|
|
901
|
+
if "duckdb" in driver_name:
|
|
902
|
+
return DIALECT_DUCKDB
|
|
903
|
+
if "snowflake" in driver_name:
|
|
904
|
+
return DIALECT_SNOWFLAKE
|
|
905
|
+
logger.warning("Unknown ADBC driver: %s. Using generic SQL dialect.", driver_name)
|
|
906
|
+
return DIALECT_GENERIC
|
|
907
|
+
|
|
908
|
+
def _serialize_json_field(self, value: Any) -> "str | None":
|
|
909
|
+
if value is None:
|
|
910
|
+
return None
|
|
911
|
+
return to_json(value)
|
|
912
|
+
|
|
913
|
+
def _encode_timestamp(self, value: datetime) -> Any:
|
|
914
|
+
if self._dialect == DIALECT_SQLITE:
|
|
915
|
+
return value.timestamp()
|
|
916
|
+
return value
|
|
917
|
+
|
|
918
|
+
def _decode_timestamp(self, value: Any) -> datetime:
|
|
919
|
+
if isinstance(value, datetime):
|
|
920
|
+
return value
|
|
921
|
+
if isinstance(value, (int, float)):
|
|
922
|
+
return datetime.fromtimestamp(float(value), tz=timezone.utc)
|
|
923
|
+
if isinstance(value, str):
|
|
924
|
+
return datetime.fromisoformat(value)
|
|
925
|
+
return datetime.fromisoformat(str(value))
|
|
926
|
+
|
|
927
|
+
def _get_create_memory_table_sql(self) -> str:
|
|
928
|
+
if self._dialect == DIALECT_POSTGRESQL:
|
|
929
|
+
return self._get_memory_ddl_postgresql()
|
|
930
|
+
if self._dialect == DIALECT_SQLITE:
|
|
931
|
+
return self._get_memory_ddl_sqlite()
|
|
932
|
+
if self._dialect == DIALECT_DUCKDB:
|
|
933
|
+
return self._get_memory_ddl_duckdb()
|
|
934
|
+
if self._dialect == DIALECT_SNOWFLAKE:
|
|
935
|
+
return self._get_memory_ddl_snowflake()
|
|
936
|
+
return self._get_memory_ddl_generic()
|
|
937
|
+
|
|
938
|
+
    def _get_memory_ddl_postgresql(self) -> str:
        """Return PostgreSQL DDL for the memory table.

        Uses JSONB for payload columns and TIMESTAMPTZ for timestamps. The
        optional owner-id column DDL is appended after ``author`` when configured.
        """
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._memory_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            event_id VARCHAR(128) NOT NULL UNIQUE,
            author VARCHAR(256){owner_id_ddl},
            timestamp TIMESTAMPTZ NOT NULL,
            content_json JSONB NOT NULL,
            content_text TEXT NOT NULL,
            metadata_json JSONB,
            inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    def _get_memory_ddl_sqlite(self) -> str:
        """Return SQLite DDL for the memory table.

        Timestamps are REAL (Unix epoch floats — see ``_encode_timestamp``)
        and JSON payloads are stored as TEXT. The optional owner-id column DDL
        is appended after ``author`` when configured.
        """
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._memory_table} (
            id TEXT PRIMARY KEY,
            session_id TEXT NOT NULL,
            app_name TEXT NOT NULL,
            user_id TEXT NOT NULL,
            event_id TEXT NOT NULL UNIQUE,
            author TEXT{owner_id_ddl},
            timestamp REAL NOT NULL,
            content_json TEXT NOT NULL,
            content_text TEXT NOT NULL,
            metadata_json TEXT,
            inserted_at REAL NOT NULL
        )
        """
    def _get_memory_ddl_duckdb(self) -> str:
        """Return DuckDB DDL for the memory table.

        Uses native JSON columns and TIMESTAMP (no timezone suffix). The
        optional owner-id column DDL is appended after ``author`` when configured.
        """
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._memory_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            event_id VARCHAR(128) NOT NULL UNIQUE,
            author VARCHAR(256){owner_id_ddl},
            timestamp TIMESTAMP NOT NULL,
            content_json JSON NOT NULL,
            content_text TEXT NOT NULL,
            metadata_json JSON,
            inserted_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    def _get_memory_ddl_snowflake(self) -> str:
        """Return Snowflake DDL for the memory table.

        Uses VARIANT for JSON payloads and TIMESTAMP_TZ for timestamps; note
        Snowflake's CURRENT_TIMESTAMP() requires parentheses. The optional
        owner-id column DDL is appended after ``author`` when configured.
        """
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._memory_table} (
            id VARCHAR PRIMARY KEY,
            session_id VARCHAR NOT NULL,
            app_name VARCHAR NOT NULL,
            user_id VARCHAR NOT NULL,
            event_id VARCHAR NOT NULL UNIQUE,
            author VARCHAR{owner_id_ddl},
            timestamp TIMESTAMP_TZ NOT NULL,
            content_json VARIANT NOT NULL,
            content_text TEXT NOT NULL,
            metadata_json VARIANT,
            inserted_at TIMESTAMP_TZ NOT NULL DEFAULT CURRENT_TIMESTAMP()
        )
        """
    def _get_memory_ddl_generic(self) -> str:
        """Return portable ANSI-ish DDL for the memory table (unknown drivers).

        JSON payloads are stored as TEXT for maximum compatibility. The
        optional owner-id column DDL is appended after ``author`` when configured.
        """
        owner_id_ddl = f", {self._owner_id_column_ddl}" if self._owner_id_column_ddl else ""
        return f"""
        CREATE TABLE IF NOT EXISTS {self._memory_table} (
            id VARCHAR(128) PRIMARY KEY,
            session_id VARCHAR(128) NOT NULL,
            app_name VARCHAR(128) NOT NULL,
            user_id VARCHAR(128) NOT NULL,
            event_id VARCHAR(128) NOT NULL UNIQUE,
            author VARCHAR(256){owner_id_ddl},
            timestamp TIMESTAMP NOT NULL,
            content_json TEXT NOT NULL,
            content_text TEXT NOT NULL,
            metadata_json TEXT,
            inserted_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
def _get_drop_memory_table_sql(self) -> "list[str]":
|
|
1029
|
+
return [f"DROP TABLE IF EXISTS {self._memory_table}"]
|
|
1030
|
+
|
|
1031
|
+
def create_tables(self) -> None:
|
|
1032
|
+
if not self._enabled:
|
|
1033
|
+
return
|
|
1034
|
+
|
|
1035
|
+
with self._config.provide_connection() as conn:
|
|
1036
|
+
cursor = conn.cursor()
|
|
1037
|
+
try:
|
|
1038
|
+
cursor.execute(self._get_create_memory_table_sql())
|
|
1039
|
+
conn.commit()
|
|
1040
|
+
|
|
1041
|
+
idx_app_user = (
|
|
1042
|
+
f"CREATE INDEX IF NOT EXISTS idx_{self._memory_table}_app_user_time "
|
|
1043
|
+
f"ON {self._memory_table}(app_name, user_id, timestamp DESC)"
|
|
1044
|
+
)
|
|
1045
|
+
cursor.execute(idx_app_user)
|
|
1046
|
+
conn.commit()
|
|
1047
|
+
|
|
1048
|
+
idx_session = (
|
|
1049
|
+
f"CREATE INDEX IF NOT EXISTS idx_{self._memory_table}_session ON {self._memory_table}(session_id)"
|
|
1050
|
+
)
|
|
1051
|
+
cursor.execute(idx_session)
|
|
1052
|
+
conn.commit()
|
|
1053
|
+
finally:
|
|
1054
|
+
cursor.close() # type: ignore[no-untyped-call]
|
|
1055
|
+
|
|
1056
|
+
def insert_memory_entries(self, entries: "list[MemoryRecord]", owner_id: "object | None" = None) -> int:
|
|
1057
|
+
if not self._enabled:
|
|
1058
|
+
msg = "Memory store is disabled"
|
|
1059
|
+
raise RuntimeError(msg)
|
|
1060
|
+
|
|
1061
|
+
if not entries:
|
|
1062
|
+
return 0
|
|
1063
|
+
|
|
1064
|
+
inserted_count = 0
|
|
1065
|
+
use_returning = self._dialect in {DIALECT_SQLITE, DIALECT_POSTGRESQL, DIALECT_DUCKDB}
|
|
1066
|
+
|
|
1067
|
+
if self._owner_id_column_name:
|
|
1068
|
+
if use_returning:
|
|
1069
|
+
sql = f"""
|
|
1070
|
+
INSERT INTO {self._memory_table} (
|
|
1071
|
+
id, session_id, app_name, user_id, event_id, author,
|
|
1072
|
+
{self._owner_id_column_name}, timestamp, content_json, content_text,
|
|
1073
|
+
metadata_json, inserted_at
|
|
1074
|
+
) VALUES (
|
|
1075
|
+
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
|
1076
|
+
) ON CONFLICT(event_id) DO NOTHING RETURNING 1
|
|
1077
|
+
"""
|
|
1078
|
+
else:
|
|
1079
|
+
sql = f"""
|
|
1080
|
+
INSERT INTO {self._memory_table} (
|
|
1081
|
+
id, session_id, app_name, user_id, event_id, author,
|
|
1082
|
+
{self._owner_id_column_name}, timestamp, content_json, content_text,
|
|
1083
|
+
metadata_json, inserted_at
|
|
1084
|
+
) VALUES (
|
|
1085
|
+
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
|
1086
|
+
)
|
|
1087
|
+
"""
|
|
1088
|
+
elif use_returning:
|
|
1089
|
+
sql = f"""
|
|
1090
|
+
INSERT INTO {self._memory_table} (
|
|
1091
|
+
id, session_id, app_name, user_id, event_id, author,
|
|
1092
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1093
|
+
) VALUES (
|
|
1094
|
+
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
|
1095
|
+
) ON CONFLICT(event_id) DO NOTHING RETURNING 1
|
|
1096
|
+
"""
|
|
1097
|
+
else:
|
|
1098
|
+
sql = f"""
|
|
1099
|
+
INSERT INTO {self._memory_table} (
|
|
1100
|
+
id, session_id, app_name, user_id, event_id, author,
|
|
1101
|
+
timestamp, content_json, content_text, metadata_json, inserted_at
|
|
1102
|
+
) VALUES (
|
|
1103
|
+
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
|
1104
|
+
)
|
|
1105
|
+
"""
|
|
1106
|
+
|
|
1107
|
+
with self._config.provide_connection() as conn:
|
|
1108
|
+
cursor = conn.cursor()
|
|
1109
|
+
try:
|
|
1110
|
+
for entry in entries:
|
|
1111
|
+
content_json = self._serialize_json_field(entry["content_json"])
|
|
1112
|
+
metadata_json = self._serialize_json_field(entry["metadata_json"])
|
|
1113
|
+
params: tuple[Any, ...]
|
|
1114
|
+
if self._owner_id_column_name:
|
|
1115
|
+
params = (
|
|
1116
|
+
entry["id"],
|
|
1117
|
+
entry["session_id"],
|
|
1118
|
+
entry["app_name"],
|
|
1119
|
+
entry["user_id"],
|
|
1120
|
+
entry["event_id"],
|
|
1121
|
+
entry["author"],
|
|
1122
|
+
owner_id,
|
|
1123
|
+
self._encode_timestamp(entry["timestamp"]),
|
|
1124
|
+
content_json,
|
|
1125
|
+
entry["content_text"],
|
|
1126
|
+
metadata_json,
|
|
1127
|
+
self._encode_timestamp(entry["inserted_at"]),
|
|
1128
|
+
)
|
|
1129
|
+
else:
|
|
1130
|
+
params = (
|
|
1131
|
+
entry["id"],
|
|
1132
|
+
entry["session_id"],
|
|
1133
|
+
entry["app_name"],
|
|
1134
|
+
entry["user_id"],
|
|
1135
|
+
entry["event_id"],
|
|
1136
|
+
entry["author"],
|
|
1137
|
+
self._encode_timestamp(entry["timestamp"]),
|
|
1138
|
+
content_json,
|
|
1139
|
+
entry["content_text"],
|
|
1140
|
+
metadata_json,
|
|
1141
|
+
self._encode_timestamp(entry["inserted_at"]),
|
|
1142
|
+
)
|
|
1143
|
+
if use_returning:
|
|
1144
|
+
cursor.execute(sql, params)
|
|
1145
|
+
if cursor.fetchone():
|
|
1146
|
+
inserted_count += 1
|
|
1147
|
+
else:
|
|
1148
|
+
try:
|
|
1149
|
+
cursor.execute(sql, params)
|
|
1150
|
+
inserted_count += 1
|
|
1151
|
+
except Exception as exc:
|
|
1152
|
+
exc_str = str(exc).lower()
|
|
1153
|
+
if "unique" in exc_str or "constraint" in exc_str or "duplicate" in exc_str:
|
|
1154
|
+
continue
|
|
1155
|
+
raise
|
|
1156
|
+
conn.commit()
|
|
1157
|
+
finally:
|
|
1158
|
+
cursor.close() # type: ignore[no-untyped-call]
|
|
1159
|
+
|
|
1160
|
+
return inserted_count
|
|
1161
|
+
|
|
1162
|
+
    def search_entries(
        self, query: str, app_name: str, user_id: str, limit: "int | None" = None
    ) -> "list[MemoryRecord]":
        """Search memory entries by substring match on content_text.

        Args:
            query: Substring to match (wrapped in ``%...%`` for a LIKE scan).
            app_name: Application scope for the search.
            user_id: User scope for the search.
            limit: Maximum rows to return; defaults to the store's configured
                max results when ``None``.

        Returns:
            Matching records, newest first. Returns an empty list if the
            memory table does not exist yet.

        Raises:
            RuntimeError: If the memory store is disabled.
        """
        if not self._enabled:
            msg = "Memory store is disabled"
            raise RuntimeError(msg)

        # FTS is not implemented for ADBC; degrade to a LIKE scan with a warning.
        if self._use_fts:
            logger.warning("ADBC memory store does not support FTS, falling back to simple search")

        effective_limit = limit if limit is not None else self._max_results
        pattern = f"%{query}%"

        sql = f"""
        SELECT id, session_id, app_name, user_id, event_id, author,
               timestamp, content_json, content_text, metadata_json, inserted_at
        FROM {self._memory_table}
        WHERE app_name = ?
          AND user_id = ?
          AND content_text LIKE ?
        ORDER BY timestamp DESC
        LIMIT ?
        """

        try:
            with self._config.provide_connection() as conn:
                cursor = conn.cursor()
                try:
                    cursor.execute(sql, (app_name, user_id, pattern, effective_limit))
                    rows = cursor.fetchall()
                finally:
                    cursor.close()  # type: ignore[no-untyped-call]
        except Exception as exc:
            error_msg = str(exc).lower()
            # A missing table (store never written to) is treated as "no results".
            if any(pattern in error_msg for pattern in ADBC_TABLE_NOT_FOUND_PATTERNS):
                return []
            raise

        return self._rows_to_records(rows)
def delete_entries_by_session(self, session_id: str) -> int:
|
|
1203
|
+
use_returning = self._dialect in {DIALECT_SQLITE, DIALECT_POSTGRESQL, DIALECT_DUCKDB}
|
|
1204
|
+
if use_returning:
|
|
1205
|
+
sql = f"DELETE FROM {self._memory_table} WHERE session_id = ? RETURNING 1"
|
|
1206
|
+
else:
|
|
1207
|
+
sql = f"DELETE FROM {self._memory_table} WHERE session_id = ?"
|
|
1208
|
+
with self._config.provide_connection() as conn:
|
|
1209
|
+
cursor = conn.cursor()
|
|
1210
|
+
try:
|
|
1211
|
+
cursor.execute(sql, (session_id,))
|
|
1212
|
+
if use_returning:
|
|
1213
|
+
deleted_rows = cursor.fetchall()
|
|
1214
|
+
conn.commit()
|
|
1215
|
+
return len(deleted_rows)
|
|
1216
|
+
conn.commit()
|
|
1217
|
+
return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0
|
|
1218
|
+
finally:
|
|
1219
|
+
cursor.close() # type: ignore[no-untyped-call]
|
|
1220
|
+
|
|
1221
|
+
def delete_entries_older_than(self, days: int) -> int:
|
|
1222
|
+
cutoff = self._encode_timestamp(datetime.now(timezone.utc) - timedelta(days=days))
|
|
1223
|
+
use_returning = self._dialect in {DIALECT_SQLITE, DIALECT_POSTGRESQL, DIALECT_DUCKDB}
|
|
1224
|
+
if use_returning:
|
|
1225
|
+
sql = f"DELETE FROM {self._memory_table} WHERE inserted_at < ? RETURNING 1"
|
|
1226
|
+
else:
|
|
1227
|
+
sql = f"DELETE FROM {self._memory_table} WHERE inserted_at < ?"
|
|
1228
|
+
with self._config.provide_connection() as conn:
|
|
1229
|
+
cursor = conn.cursor()
|
|
1230
|
+
try:
|
|
1231
|
+
cursor.execute(sql, (cutoff,))
|
|
1232
|
+
if use_returning:
|
|
1233
|
+
deleted_rows = cursor.fetchall()
|
|
1234
|
+
conn.commit()
|
|
1235
|
+
return len(deleted_rows)
|
|
1236
|
+
conn.commit()
|
|
1237
|
+
return cursor.rowcount if cursor.rowcount and cursor.rowcount > 0 else 0
|
|
1238
|
+
finally:
|
|
1239
|
+
cursor.close() # type: ignore[no-untyped-call]
|
|
1240
|
+
|
|
1241
|
+
def _rows_to_records(self, rows: "list[Any]") -> "list[MemoryRecord]":
|
|
1242
|
+
records: list[MemoryRecord] = []
|
|
1243
|
+
for row in rows:
|
|
1244
|
+
content_json = row[7]
|
|
1245
|
+
if isinstance(content_json, dict):
|
|
1246
|
+
content_value = content_json
|
|
1247
|
+
else:
|
|
1248
|
+
content_value = from_json(content_json if isinstance(content_json, (str, bytes)) else str(content_json))
|
|
1249
|
+
|
|
1250
|
+
metadata_json = row[9]
|
|
1251
|
+
if metadata_json is None:
|
|
1252
|
+
metadata_value = None
|
|
1253
|
+
elif isinstance(metadata_json, dict):
|
|
1254
|
+
metadata_value = metadata_json
|
|
1255
|
+
else:
|
|
1256
|
+
metadata_value = from_json(
|
|
1257
|
+
metadata_json if isinstance(metadata_json, (str, bytes)) else str(metadata_json)
|
|
1258
|
+
)
|
|
1259
|
+
|
|
1260
|
+
records.append({
|
|
1261
|
+
"id": row[0],
|
|
1262
|
+
"session_id": row[1],
|
|
1263
|
+
"app_name": row[2],
|
|
1264
|
+
"user_id": row[3],
|
|
1265
|
+
"event_id": row[4],
|
|
1266
|
+
"author": row[5],
|
|
1267
|
+
"timestamp": self._decode_timestamp(row[6]),
|
|
1268
|
+
"content_json": content_value,
|
|
1269
|
+
"content_text": row[8],
|
|
1270
|
+
"metadata_json": metadata_value,
|
|
1271
|
+
"inserted_at": self._decode_timestamp(row[10]),
|
|
1272
|
+
})
|
|
1273
|
+
return records
|