sqlspec 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic. Click here for more details.
- sqlspec/__init__.py +7 -15
- sqlspec/_serialization.py +55 -25
- sqlspec/_typing.py +62 -52
- sqlspec/adapters/adbc/_types.py +1 -1
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +870 -0
- sqlspec/adapters/adbc/config.py +62 -12
- sqlspec/adapters/adbc/data_dictionary.py +52 -2
- sqlspec/adapters/adbc/driver.py +144 -45
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +44 -50
- sqlspec/adapters/aiosqlite/_types.py +1 -1
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +527 -0
- sqlspec/adapters/aiosqlite/config.py +86 -16
- sqlspec/adapters/aiosqlite/data_dictionary.py +34 -2
- sqlspec/adapters/aiosqlite/driver.py +127 -38
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +7 -7
- sqlspec/adapters/asyncmy/__init__.py +7 -1
- sqlspec/adapters/asyncmy/_types.py +1 -1
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +493 -0
- sqlspec/adapters/asyncmy/config.py +59 -17
- sqlspec/adapters/asyncmy/data_dictionary.py +41 -2
- sqlspec/adapters/asyncmy/driver.py +293 -62
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +2 -1
- sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
- sqlspec/adapters/asyncpg/_types.py +11 -7
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +450 -0
- sqlspec/adapters/asyncpg/config.py +57 -36
- sqlspec/adapters/asyncpg/data_dictionary.py +41 -2
- sqlspec/adapters/asyncpg/driver.py +153 -23
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/_types.py +1 -1
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +576 -0
- sqlspec/adapters/bigquery/config.py +25 -11
- sqlspec/adapters/bigquery/data_dictionary.py +42 -2
- sqlspec/adapters/bigquery/driver.py +352 -144
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +55 -23
- sqlspec/adapters/duckdb/_types.py +2 -2
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +553 -0
- sqlspec/adapters/duckdb/config.py +79 -21
- sqlspec/adapters/duckdb/data_dictionary.py +41 -2
- sqlspec/adapters/duckdb/driver.py +138 -43
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +5 -5
- sqlspec/adapters/duckdb/type_converter.py +51 -21
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +20 -2
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1745 -0
- sqlspec/adapters/oracledb/config.py +120 -36
- sqlspec/adapters/oracledb/data_dictionary.py +87 -20
- sqlspec/adapters/oracledb/driver.py +292 -84
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +767 -0
- sqlspec/adapters/oracledb/migrations.py +316 -25
- sqlspec/adapters/oracledb/type_converter.py +91 -16
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +2 -1
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +482 -0
- sqlspec/adapters/psqlpy/config.py +45 -19
- sqlspec/adapters/psqlpy/data_dictionary.py +41 -2
- sqlspec/adapters/psqlpy/driver.py +101 -31
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +40 -11
- sqlspec/adapters/psycopg/_type_handlers.py +80 -0
- sqlspec/adapters/psycopg/_types.py +2 -1
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +944 -0
- sqlspec/adapters/psycopg/config.py +65 -37
- sqlspec/adapters/psycopg/data_dictionary.py +77 -3
- sqlspec/adapters/psycopg/driver.py +200 -78
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/sqlite/__init__.py +2 -1
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +1 -1
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +572 -0
- sqlspec/adapters/sqlite/config.py +85 -16
- sqlspec/adapters/sqlite/data_dictionary.py +34 -2
- sqlspec/adapters/sqlite/driver.py +120 -52
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +5 -5
- sqlspec/base.py +45 -26
- sqlspec/builder/__init__.py +73 -4
- sqlspec/builder/_base.py +91 -58
- sqlspec/builder/_column.py +5 -5
- sqlspec/builder/_ddl.py +98 -89
- sqlspec/builder/_delete.py +5 -4
- sqlspec/builder/_dml.py +388 -0
- sqlspec/{_sql.py → builder/_factory.py} +41 -44
- sqlspec/builder/_insert.py +5 -82
- sqlspec/builder/{mixins/_join_operations.py → _join.py} +145 -143
- sqlspec/builder/_merge.py +446 -11
- sqlspec/builder/_parsing_utils.py +9 -11
- sqlspec/builder/_select.py +1313 -25
- sqlspec/builder/_update.py +11 -42
- sqlspec/cli.py +76 -69
- sqlspec/config.py +231 -60
- sqlspec/core/__init__.py +5 -4
- sqlspec/core/cache.py +18 -18
- sqlspec/core/compiler.py +6 -8
- sqlspec/core/filters.py +37 -37
- sqlspec/core/hashing.py +9 -9
- sqlspec/core/parameters.py +76 -45
- sqlspec/core/result.py +102 -46
- sqlspec/core/splitter.py +16 -17
- sqlspec/core/statement.py +32 -31
- sqlspec/core/type_conversion.py +3 -2
- sqlspec/driver/__init__.py +1 -3
- sqlspec/driver/_async.py +95 -161
- sqlspec/driver/_common.py +133 -80
- sqlspec/driver/_sync.py +95 -162
- sqlspec/driver/mixins/_result_tools.py +20 -236
- sqlspec/driver/mixins/_sql_translator.py +4 -4
- sqlspec/exceptions.py +70 -7
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/adapter.py +73 -53
- sqlspec/extensions/litestar/__init__.py +21 -4
- sqlspec/extensions/litestar/cli.py +54 -10
- sqlspec/extensions/litestar/config.py +59 -266
- sqlspec/extensions/litestar/handlers.py +46 -17
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +324 -223
- sqlspec/extensions/litestar/providers.py +25 -25
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/loader.py +30 -49
- sqlspec/migrations/base.py +200 -76
- sqlspec/migrations/commands.py +591 -62
- sqlspec/migrations/context.py +6 -9
- sqlspec/migrations/fix.py +199 -0
- sqlspec/migrations/loaders.py +47 -19
- sqlspec/migrations/runner.py +241 -75
- sqlspec/migrations/tracker.py +237 -21
- sqlspec/migrations/utils.py +51 -3
- sqlspec/migrations/validation.py +177 -0
- sqlspec/protocols.py +66 -36
- sqlspec/storage/_utils.py +98 -0
- sqlspec/storage/backends/fsspec.py +134 -106
- sqlspec/storage/backends/local.py +78 -51
- sqlspec/storage/backends/obstore.py +278 -162
- sqlspec/storage/registry.py +75 -39
- sqlspec/typing.py +14 -84
- sqlspec/utils/config_resolver.py +6 -6
- sqlspec/utils/correlation.py +4 -5
- sqlspec/utils/data_transformation.py +3 -2
- sqlspec/utils/deprecation.py +9 -8
- sqlspec/utils/fixtures.py +4 -4
- sqlspec/utils/logging.py +46 -6
- sqlspec/utils/module_loader.py +2 -2
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +3 -3
- sqlspec/utils/sync_tools.py +21 -17
- sqlspec/utils/text.py +1 -2
- sqlspec/utils/type_guards.py +111 -20
- sqlspec/utils/version.py +433 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/METADATA +40 -21
- sqlspec-0.27.0.dist-info/RECORD +207 -0
- sqlspec/builder/mixins/__init__.py +0 -55
- sqlspec/builder/mixins/_cte_and_set_ops.py +0 -253
- sqlspec/builder/mixins/_delete_operations.py +0 -50
- sqlspec/builder/mixins/_insert_operations.py +0 -282
- sqlspec/builder/mixins/_merge_operations.py +0 -698
- sqlspec/builder/mixins/_order_limit_operations.py +0 -145
- sqlspec/builder/mixins/_pivot_operations.py +0 -157
- sqlspec/builder/mixins/_select_operations.py +0 -930
- sqlspec/builder/mixins/_update_operations.py +0 -199
- sqlspec/builder/mixins/_where_clause.py +0 -1298
- sqlspec-0.26.0.dist-info/RECORD +0 -157
- sqlspec-0.26.0.dist-info/licenses/NOTICE +0 -29
- {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.27.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,504 @@
|
|
|
1
|
+
"""ADBC session store for Litestar integration with multi-dialect support.
|
|
2
|
+
|
|
3
|
+
ADBC (Arrow Database Connectivity) supports multiple database backends including
|
|
4
|
+
PostgreSQL, SQLite, DuckDB, BigQuery, MySQL, and Snowflake. This store automatically
|
|
5
|
+
detects the dialect and adapts SQL syntax accordingly.
|
|
6
|
+
|
|
7
|
+
Supports:
|
|
8
|
+
- PostgreSQL: BYTEA data type, TIMESTAMPTZ, $1 parameters, ON CONFLICT
|
|
9
|
+
- SQLite: BLOB data type, DATETIME, ? parameters, INSERT OR REPLACE
|
|
10
|
+
- DuckDB: BLOB data type, TIMESTAMP, ? parameters, ON CONFLICT
|
|
11
|
+
- MySQL/MariaDB: BLOB data type, DATETIME, %s parameters, ON DUPLICATE KEY UPDATE
|
|
12
|
+
- BigQuery: BYTES data type, TIMESTAMP, @param parameters, MERGE
|
|
13
|
+
- Snowflake: BINARY data type, TIMESTAMP WITH TIME ZONE, ? parameters, MERGE
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
from datetime import datetime, timedelta, timezone
|
|
17
|
+
from typing import TYPE_CHECKING
|
|
18
|
+
|
|
19
|
+
from sqlspec.extensions.litestar.store import BaseSQLSpecStore
|
|
20
|
+
from sqlspec.utils.logging import get_logger
|
|
21
|
+
from sqlspec.utils.sync_tools import async_
|
|
22
|
+
|
|
23
|
+
if TYPE_CHECKING:
|
|
24
|
+
from sqlspec.adapters.adbc.config import AdbcConfig
|
|
25
|
+
|
|
26
|
+
logger = get_logger("adapters.adbc.litestar.store")
|
|
27
|
+
|
|
28
|
+
__all__ = ("ADBCStore",)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class ADBCStore(BaseSQLSpecStore["AdbcConfig"]):
    """ADBC session store using synchronous ADBC driver.

    Implements server-side session storage for Litestar using ADBC
    (Arrow Database Connectivity) via the synchronous driver. Uses
    Litestar's sync_to_thread utility to provide an async interface
    compatible with the Store protocol.

    ADBC supports multiple database backends (PostgreSQL, SQLite, DuckDB, etc.).
    The SQL schema is optimized for PostgreSQL by default, but can work with
    other backends that support TIMESTAMPTZ and BYTEA equivalents.

    Provides efficient session management with:
    - Sync operations wrapped for async compatibility
    - INSERT ON CONFLICT (UPSERT) for PostgreSQL
    - Automatic expiration handling with TIMESTAMPTZ
    - Efficient cleanup of expired sessions

    Args:
        config: AdbcConfig instance.

    Example:
        from sqlspec.adapters.adbc import AdbcConfig
        from sqlspec.adapters.adbc.litestar.store import ADBCStore

        config = AdbcConfig(
            connection_config={
                "uri": "postgresql://user:pass@localhost/db"
            }
        )
        store = ADBCStore(config)
        await store.create_table()
    """

    __slots__ = ("_dialect",)

    def __init__(self, config: "AdbcConfig") -> None:
        """Initialize ADBC session store.

        Args:
            config: AdbcConfig instance.

        Notes:
            Table name is read from config.extension_config["litestar"]["session_table"].
        """
        super().__init__(config)
        # Dialect is detected lazily on first use because detection requires
        # opening a database connection.
        self._dialect: str | None = None

    def _get_dialect(self) -> str:
        """Get the database dialect, caching it after first access.

        Returns:
            Dialect name (postgres, sqlite, duckdb, mysql, bigquery, snowflake).
        """
        if self._dialect is not None:
            return self._dialect

        with self._config.provide_session() as driver:
            dialect_value = getattr(driver, "dialect", None)
            # Default to PostgreSQL syntax when the driver exposes no dialect.
            self._dialect = str(dialect_value) if dialect_value else "postgres"

        assert self._dialect is not None
        return self._dialect

    def _get_create_table_sql(self) -> str:
        """Get dialect-specific CREATE TABLE SQL for ADBC.

        Returns:
            SQL statement to create the sessions table with proper indexes.

        Notes:
            Automatically adapts to the detected database dialect:
            - PostgreSQL: BYTEA, TIMESTAMPTZ with partial index
            - SQLite: BLOB, DATETIME
            - DuckDB: BLOB, TIMESTAMP
            - MySQL/MariaDB: BLOB, DATETIME
            - BigQuery: BYTES, TIMESTAMP
            - Snowflake: BINARY, TIMESTAMP WITH TIME ZONE
        """
        dialect = self._get_dialect()

        if dialect in {"postgres", "postgresql"}:
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id TEXT PRIMARY KEY,
                data BYTEA NOT NULL,
                expires_at TIMESTAMPTZ
            );
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at) WHERE expires_at IS NOT NULL;
            """

        if dialect == "sqlite":
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id TEXT PRIMARY KEY,
                data BLOB NOT NULL,
                expires_at DATETIME
            );
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at);
            """

        if dialect == "duckdb":
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id VARCHAR PRIMARY KEY,
                data BLOB NOT NULL,
                expires_at TIMESTAMP
            );
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at);
            """

        if dialect in {"mysql", "mariadb"}:
            # MySQL has no IF NOT EXISTS for CREATE INDEX.
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id VARCHAR(255) PRIMARY KEY,
                data BLOB NOT NULL,
                expires_at DATETIME
            );
            CREATE INDEX idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at);
            """

        if dialect == "bigquery":
            # BigQuery does not enforce PRIMARY KEY constraints.
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id STRING NOT NULL,
                data BYTES NOT NULL,
                expires_at TIMESTAMP
            );
            CREATE INDEX idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at);
            """

        if dialect == "snowflake":
            return f"""
            CREATE TABLE IF NOT EXISTS {self._table_name} (
                session_id VARCHAR(255) PRIMARY KEY,
                data BINARY NOT NULL,
                expires_at TIMESTAMP WITH TIME ZONE
            );
            CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
            ON {self._table_name}(expires_at);
            """

        # Unknown dialect: fall back to PostgreSQL-flavored schema.
        return f"""
        CREATE TABLE IF NOT EXISTS {self._table_name} (
            session_id TEXT PRIMARY KEY,
            data BYTEA NOT NULL,
            expires_at TIMESTAMPTZ
        );
        CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
        ON {self._table_name}(expires_at);
        """

    def _get_param_placeholder(self, position: int) -> str:
        """Get the parameter placeholder syntax for the current dialect.

        Args:
            position: 1-based parameter position.

        Returns:
            Parameter placeholder string (e.g., '$1', '?', '%s', '@param1').
        """
        dialect = self._get_dialect()

        if dialect in {"postgres", "postgresql"}:
            return f"${position}"
        if dialect in {"mysql", "mariadb"}:
            return "%s"
        if dialect == "bigquery":
            return f"@param{position}"
        return "?"

    def _get_current_timestamp_expr(self) -> str:
        """Get the current timestamp expression for the current dialect.

        Returns:
            SQL expression for getting current timestamp with timezone.
        """
        dialect = self._get_dialect()

        if dialect in {"postgres", "postgresql"}:
            return "CURRENT_TIMESTAMP AT TIME ZONE 'UTC'"
        if dialect in {"mysql", "mariadb"}:
            return "UTC_TIMESTAMP()"
        if dialect == "bigquery":
            return "CURRENT_TIMESTAMP()"
        if dialect == "snowflake":
            return "CONVERT_TIMEZONE('UTC', CURRENT_TIMESTAMP())"
        return "CURRENT_TIMESTAMP"

    def _create_table(self) -> None:
        """Synchronous implementation of create_table using ADBC driver."""
        sql_text = self._get_create_table_sql()
        with self._config.provide_session() as driver:
            driver.execute_script(sql_text)
            driver.commit()
        logger.debug("Created session table: %s", self._table_name)

    def _get_drop_table_sql(self) -> "list[str]":
        """Get dialect-specific DROP TABLE SQL statements for ADBC.

        Returns:
            List of SQL statements to drop indexes and table.
        """
        dialect = self._get_dialect()

        if dialect in {"mysql", "mariadb"}:
            # MySQL requires the table name in DROP INDEX and has no IF EXISTS.
            return [
                f"DROP INDEX idx_{self._table_name}_expires_at ON {self._table_name}",
                f"DROP TABLE IF EXISTS {self._table_name}",
            ]

        return [f"DROP INDEX IF EXISTS idx_{self._table_name}_expires_at", f"DROP TABLE IF EXISTS {self._table_name}"]

    async def create_table(self) -> None:
        """Create the session table if it doesn't exist."""
        await async_(self._create_table)()

    def _get(self, key: str, renew_for: "int | timedelta | None" = None) -> "bytes | None":
        """Synchronous implementation of get using ADBC driver."""
        p1 = self._get_param_placeholder(1)
        current_ts = self._get_current_timestamp_expr()

        sql = f"""
        SELECT data, expires_at FROM {self._table_name}
        WHERE session_id = {p1}
        AND (expires_at IS NULL OR expires_at > {current_ts})
        """

        with self._config.provide_session() as driver:
            result = driver.select_one_or_none(sql, key)

            if result is None:
                return None

            data = result["data"]
            expires_at = result["expires_at"]

            # Sliding expiration: only renew when the session actually expires.
            if renew_for is not None and expires_at is not None:
                new_expires_at = self._calculate_expires_at(renew_for)
                p1_update = self._get_param_placeholder(1)
                p2_update = self._get_param_placeholder(2)
                update_sql = f"""
                UPDATE {self._table_name}
                SET expires_at = {p1_update}
                WHERE session_id = {p2_update}
                """
                driver.execute(update_sql, new_expires_at, key)
                driver.commit()

            return bytes(data)

    async def get(self, key: str, renew_for: "int | timedelta | None" = None) -> "bytes | None":
        """Get a session value by key.

        Args:
            key: Session ID to retrieve.
            renew_for: If given, renew the expiry time for this duration.

        Returns:
            Session data as bytes if found and not expired, None otherwise.
        """
        return await async_(self._get)(key, renew_for)

    def _set(self, key: str, value: "str | bytes", expires_in: "int | timedelta | None" = None) -> None:
        """Synchronous implementation of set using ADBC driver with dialect-specific UPSERT."""
        data = self._value_to_bytes(value)
        expires_at = self._calculate_expires_at(expires_in)
        dialect = self._get_dialect()

        p1 = self._get_param_placeholder(1)
        p2 = self._get_param_placeholder(2)
        p3 = self._get_param_placeholder(3)

        if dialect in {"postgres", "postgresql", "sqlite", "duckdb"}:
            if dialect == "sqlite":
                sql = f"""
                INSERT OR REPLACE INTO {self._table_name} (session_id, data, expires_at)
                VALUES ({p1}, {p2}, {p3})
                """
            else:
                sql = f"""
                INSERT INTO {self._table_name} (session_id, data, expires_at)
                VALUES ({p1}, {p2}, {p3})
                ON CONFLICT (session_id) DO UPDATE
                SET data = EXCLUDED.data, expires_at = EXCLUDED.expires_at
                """
        elif dialect in {"mysql", "mariadb"}:
            sql = f"""
            INSERT INTO {self._table_name} (session_id, data, expires_at)
            VALUES ({p1}, {p2}, {p3})
            ON DUPLICATE KEY UPDATE data = VALUES(data), expires_at = VALUES(expires_at)
            """
        elif dialect in {"bigquery", "snowflake"}:
            # No single-statement UPSERT available here: emulate with a
            # check-then-write pair (NOTE(review): not atomic under
            # concurrent writers for the same session_id).
            with self._config.provide_session() as driver:
                check_sql = f"SELECT COUNT(*) as count FROM {self._table_name} WHERE session_id = {p1}"
                result = driver.select_one(check_sql, key)
                exists = result and result.get("count", 0) > 0

                if exists:
                    sql = f"""
                    UPDATE {self._table_name}
                    SET data = {p1}, expires_at = {p2}
                    WHERE session_id = {p3}
                    """
                    driver.execute(sql, data, expires_at, key)
                else:
                    sql = f"""
                    INSERT INTO {self._table_name} (session_id, data, expires_at)
                    VALUES ({p1}, {p2}, {p3})
                    """
                    driver.execute(sql, key, data, expires_at)
                driver.commit()
                return
        else:
            # Unknown dialect: assume ON CONFLICT support (PostgreSQL-style).
            sql = f"""
            INSERT INTO {self._table_name} (session_id, data, expires_at)
            VALUES ({p1}, {p2}, {p3})
            ON CONFLICT (session_id) DO UPDATE
            SET data = EXCLUDED.data, expires_at = EXCLUDED.expires_at
            """

        with self._config.provide_session() as driver:
            driver.execute(sql, key, data, expires_at)
            driver.commit()

    async def set(self, key: str, value: "str | bytes", expires_in: "int | timedelta | None" = None) -> None:
        """Store a session value.

        Args:
            key: Session ID.
            value: Session data.
            expires_in: Time until expiration.
        """
        await async_(self._set)(key, value, expires_in)

    def _delete(self, key: str) -> None:
        """Synchronous implementation of delete using ADBC driver."""
        p1 = self._get_param_placeholder(1)
        sql = f"DELETE FROM {self._table_name} WHERE session_id = {p1}"

        with self._config.provide_session() as driver:
            driver.execute(sql, key)
            driver.commit()

    async def delete(self, key: str) -> None:
        """Delete a session by key.

        Args:
            key: Session ID to delete.
        """
        await async_(self._delete)(key)

    def _delete_all(self) -> None:
        """Synchronous implementation of delete_all using ADBC driver."""
        sql = f"DELETE FROM {self._table_name}"

        with self._config.provide_session() as driver:
            driver.execute(sql)
            driver.commit()

    async def delete_all(self) -> None:
        """Delete all sessions from the store."""
        await async_(self._delete_all)()

    def _exists(self, key: str) -> bool:
        """Synchronous implementation of exists using ADBC driver."""
        p1 = self._get_param_placeholder(1)
        current_ts = self._get_current_timestamp_expr()

        sql = f"""
        SELECT 1 FROM {self._table_name}
        WHERE session_id = {p1}
        AND (expires_at IS NULL OR expires_at > {current_ts})
        """

        with self._config.provide_session() as driver:
            return bool(driver.select_one_or_none(sql, key) is not None)

    async def exists(self, key: str) -> bool:
        """Check if a session key exists and is not expired.

        Args:
            key: Session ID to check.

        Returns:
            True if the session exists and is not expired.
        """
        return await async_(self._exists)(key)

    def _expires_in(self, key: str) -> "int | None":
        """Synchronous implementation of expires_in using ADBC driver."""
        p1 = self._get_param_placeholder(1)
        sql = f"""
        SELECT expires_at FROM {self._table_name}
        WHERE session_id = {p1}
        """

        with self._config.provide_session() as driver:
            # FIX: use select_one_or_none (as in _get/_exists) so a missing
            # key yields None instead of raising from select_one; the
            # original's `result is None` check implies this was intended.
            result = driver.select_one_or_none(sql, key)

        if result is None or result.get("expires_at") is None:
            return None

        expires_at = result["expires_at"]

        if not isinstance(expires_at, datetime):
            return None

        # Some backends return naive datetimes; treat them as UTC.
        if expires_at.tzinfo is None:
            expires_at = expires_at.replace(tzinfo=timezone.utc)

        now = datetime.now(timezone.utc)

        if expires_at <= now:
            return 0

        delta = expires_at - now
        return int(delta.total_seconds())

    async def expires_in(self, key: str) -> "int | None":
        """Get the time in seconds until the session expires.

        Args:
            key: Session ID to check.

        Returns:
            Seconds until expiration, or None if no expiry or key doesn't exist.
        """
        return await async_(self._expires_in)(key)

    def _delete_expired(self) -> int:
        """Synchronous implementation of delete_expired using ADBC driver."""
        current_ts = self._get_current_timestamp_expr()
        dialect = self._get_dialect()

        if dialect in {"postgres", "postgresql"}:
            # RETURNING lets us count the deleted rows in one statement.
            sql = f"DELETE FROM {self._table_name} WHERE expires_at <= {current_ts} RETURNING session_id"
        else:
            # No RETURNING support: count first, then delete.
            count_sql = f"SELECT COUNT(*) as count FROM {self._table_name} WHERE expires_at <= {current_ts}"
            delete_sql = f"DELETE FROM {self._table_name} WHERE expires_at <= {current_ts}"

            with self._config.provide_session() as driver:
                result = driver.select_one(count_sql)
                count = result.get("count", 0) if result else 0

                if count > 0:
                    driver.execute(delete_sql)
                    driver.commit()
                    logger.debug("Cleaned up %d expired sessions", count)

                return count

        with self._config.provide_session() as driver:
            exec_result = driver.execute(sql)
            driver.commit()
            count = exec_result.rows_affected
            if count > 0:
                logger.debug("Cleaned up %d expired sessions", count)
            return count

    async def delete_expired(self) -> int:
        """Delete all expired sessions.

        Returns:
            Number of sessions deleted.
        """
        return await async_(self._delete_expired)()
|
@@ -5,32 +5,65 @@ type conversion for different database backends (PostgreSQL, SQLite, DuckDB,
|
|
|
5
5
|
MySQL, BigQuery, Snowflake).
|
|
6
6
|
"""
|
|
7
7
|
|
|
8
|
-
from
|
|
8
|
+
from functools import lru_cache
|
|
9
|
+
from typing import Any, Final
|
|
9
10
|
|
|
10
11
|
from sqlspec.core.type_conversion import BaseTypeConverter
|
|
11
12
|
from sqlspec.utils.serializers import to_json
|
|
12
13
|
|
|
14
|
+
# Fast pre-filter for type detection: a string containing none of these
# characters cannot be a JSON object/array ("{", "["), UUID/date ("-"),
# time (":"), ISO timestamp ("T"), or fractional value ("."), so the
# expensive detect_type() call is skipped entirely.
ADBC_SPECIAL_CHARS: Final[frozenset[str]] = frozenset({"{", "[", "-", ":", "T", "."})
|
|
15
|
+
|
|
13
16
|
|
|
14
17
|
class ADBCTypeConverter(BaseTypeConverter):
|
|
15
18
|
"""ADBC-specific type converter with dialect awareness.
|
|
16
19
|
|
|
17
20
|
Extends the base BaseTypeConverter with ADBC multi-backend functionality
|
|
18
21
|
including dialect-specific type handling for different database systems.
|
|
22
|
+
Includes per-instance LRU cache for improved performance.
|
|
19
23
|
"""
|
|
20
24
|
|
|
21
|
-
__slots__ = ("dialect"
|
|
25
|
+
__slots__ = ("_convert_cache", "dialect")
|
|
22
26
|
|
|
23
|
-
def __init__(self, dialect: str) -> None:
|
|
24
|
-
"""Initialize with dialect-specific configuration.
|
|
27
|
+
def __init__(self, dialect: str, cache_size: int = 5000) -> None:
    """Initialize with dialect-specific configuration and conversion cache.

    Args:
        dialect: Target database dialect (postgres, sqlite, duckdb, etc.)
        cache_size: Maximum number of string values to cache (default: 5000)
    """
    super().__init__()
    # Normalize so dialect comparisons below are case-insensitive.
    self.dialect = dialect.lower()

    # The converter closure is built per instance so the LRU cache is
    # bounded per converter and released when the instance is collected.
    @lru_cache(maxsize=cache_size)
    def _cached_convert(value: str) -> Any:
        # Cheap pre-filter: skip detection for strings that cannot hold
        # any encoded type (see ADBC_SPECIAL_CHARS).
        if not value or not any(c in value for c in ADBC_SPECIAL_CHARS):
            return value
        detected_type = self.detect_type(value)
        if detected_type:
            try:
                # Dialect-specific fast paths; anything not matched here
                # falls through to the generic convert_value call below.
                if self.dialect in {"postgres", "postgresql"}:
                    if detected_type in {"uuid", "interval"}:
                        return self.convert_value(value, detected_type)
                elif self.dialect == "duckdb":
                    if detected_type == "uuid":
                        return self.convert_value(value, detected_type)
                elif self.dialect == "sqlite":
                    # SQLite stores UUIDs as plain text.
                    if detected_type == "uuid":
                        return str(value)
                elif self.dialect == "bigquery":
                    if detected_type == "uuid":
                        return self.convert_value(value, detected_type)
                elif self.dialect in {"mysql", "snowflake"} and detected_type in {"uuid", "json"}:
                    return self.convert_value(value, detected_type)
                return self.convert_value(value, detected_type)
            except Exception:
                # Best-effort conversion: on any failure, pass the raw
                # string through unchanged.
                return value
        return value

    self._convert_cache = _cached_convert
|
|
64
|
+
|
|
32
65
|
def convert_if_detected(self, value: Any) -> Any:
|
|
33
|
-
"""Convert value with dialect-specific handling.
|
|
66
|
+
"""Convert value with dialect-specific handling (cached).
|
|
34
67
|
|
|
35
68
|
Args:
|
|
36
69
|
value: Value to potentially convert.
|
|
@@ -40,37 +73,7 @@ class ADBCTypeConverter(BaseTypeConverter):
|
|
|
40
73
|
"""
|
|
41
74
|
if not isinstance(value, str):
|
|
42
75
|
return value
|
|
43
|
-
|
|
44
|
-
if not any(c in value for c in ["{", "[", "-", ":", "T"]):
|
|
45
|
-
return value
|
|
46
|
-
|
|
47
|
-
detected_type = self.detect_type(value)
|
|
48
|
-
if detected_type:
|
|
49
|
-
try:
|
|
50
|
-
if self.dialect in {"postgres", "postgresql"}:
|
|
51
|
-
if detected_type in {"uuid", "interval"}:
|
|
52
|
-
return self.convert_value(value, detected_type)
|
|
53
|
-
|
|
54
|
-
elif self.dialect == "duckdb":
|
|
55
|
-
if detected_type == "uuid":
|
|
56
|
-
return self.convert_value(value, detected_type)
|
|
57
|
-
|
|
58
|
-
elif self.dialect == "sqlite":
|
|
59
|
-
if detected_type == "uuid":
|
|
60
|
-
return str(value)
|
|
61
|
-
|
|
62
|
-
elif self.dialect == "bigquery":
|
|
63
|
-
if detected_type == "uuid":
|
|
64
|
-
return self.convert_value(value, detected_type)
|
|
65
|
-
|
|
66
|
-
elif self.dialect in {"mysql", "snowflake"} and detected_type in {"uuid", "json"}:
|
|
67
|
-
return self.convert_value(value, detected_type)
|
|
68
|
-
|
|
69
|
-
return self.convert_value(value, detected_type)
|
|
70
|
-
except Exception:
|
|
71
|
-
return value
|
|
72
|
-
|
|
73
|
-
return value
|
|
76
|
+
return self._convert_cache(value)
|
|
74
77
|
|
|
75
78
|
def convert_dict(self, value: dict[str, Any]) -> Any:
|
|
76
79
|
"""Convert dictionary values with dialect-specific handling.
|
|
@@ -81,13 +84,8 @@ class ADBCTypeConverter(BaseTypeConverter):
|
|
|
81
84
|
Returns:
|
|
82
85
|
Converted value appropriate for the dialect.
|
|
83
86
|
"""
|
|
84
|
-
|
|
85
|
-
# For dialects that cannot handle raw dicts (like ADBC PostgreSQL),
|
|
86
|
-
# convert to JSON strings
|
|
87
87
|
if self.dialect in {"postgres", "postgresql", "bigquery"}:
|
|
88
88
|
return to_json(value)
|
|
89
|
-
|
|
90
|
-
# For other dialects, pass through unchanged
|
|
91
89
|
return value
|
|
92
90
|
|
|
93
91
|
def supports_native_type(self, type_name: str) -> bool:
|
|
@@ -104,11 +102,10 @@ class ADBCTypeConverter(BaseTypeConverter):
|
|
|
104
102
|
"postgresql": ["uuid", "json", "interval", "pg_array"],
|
|
105
103
|
"duckdb": ["uuid", "json"],
|
|
106
104
|
"bigquery": ["json"],
|
|
107
|
-
"sqlite": [],
|
|
105
|
+
"sqlite": [],
|
|
108
106
|
"mysql": ["json"],
|
|
109
107
|
"snowflake": ["json"],
|
|
110
108
|
}
|
|
111
|
-
|
|
112
109
|
return type_name in native_support.get(self.dialect, [])
|
|
113
110
|
|
|
114
111
|
def get_dialect_specific_converter(self, value: Any, target_type: str) -> Any:
|
|
@@ -124,36 +121,33 @@ class ADBCTypeConverter(BaseTypeConverter):
|
|
|
124
121
|
if self.dialect in {"postgres", "postgresql"}:
|
|
125
122
|
if target_type in {"uuid", "json", "interval"}:
|
|
126
123
|
return self.convert_value(value, target_type)
|
|
127
|
-
|
|
128
124
|
elif self.dialect == "duckdb":
|
|
129
125
|
if target_type in {"uuid", "json"}:
|
|
130
126
|
return self.convert_value(value, target_type)
|
|
131
|
-
|
|
132
127
|
elif self.dialect == "sqlite":
|
|
133
128
|
if target_type == "uuid":
|
|
134
129
|
return str(value)
|
|
135
130
|
if target_type == "json":
|
|
136
131
|
return self.convert_value(value, target_type)
|
|
137
|
-
|
|
138
132
|
elif self.dialect == "bigquery":
|
|
139
133
|
if target_type == "uuid":
|
|
140
134
|
return str(self.convert_value(value, target_type))
|
|
141
135
|
if target_type == "json":
|
|
142
136
|
return self.convert_value(value, target_type)
|
|
143
|
-
|
|
144
137
|
return self.convert_value(value, target_type) if hasattr(self, "convert_value") else value
|
|
145
138
|
|
|
146
139
|
|
|
147
|
-
def get_adbc_type_converter(dialect: str) -> ADBCTypeConverter:
|
|
140
|
+
def get_adbc_type_converter(dialect: str, cache_size: int = 5000) -> ADBCTypeConverter:
|
|
148
141
|
"""Factory function to create dialect-specific ADBC type converter.
|
|
149
142
|
|
|
150
143
|
Args:
|
|
151
144
|
dialect: Database dialect name.
|
|
145
|
+
cache_size: Maximum number of string values to cache (default: 5000)
|
|
152
146
|
|
|
153
147
|
Returns:
|
|
154
148
|
Configured ADBCTypeConverter instance.
|
|
155
149
|
"""
|
|
156
|
-
return ADBCTypeConverter(dialect)
|
|
150
|
+
return ADBCTypeConverter(dialect, cache_size)
|
|
157
151
|
|
|
158
152
|
|
|
159
|
-
__all__ = ("ADBCTypeConverter", "get_adbc_type_converter")
|
|
153
|
+
__all__ = ("ADBC_SPECIAL_CHARS", "ADBCTypeConverter", "get_adbc_type_converter")
|