sqlspec 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec has been flagged as potentially problematic; consult the registry's advisory details for this release before upgrading.
- sqlspec/__init__.py +7 -15
- sqlspec/_serialization.py +256 -24
- sqlspec/_typing.py +71 -52
- sqlspec/adapters/adbc/_types.py +1 -1
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +870 -0
- sqlspec/adapters/adbc/config.py +69 -12
- sqlspec/adapters/adbc/data_dictionary.py +340 -0
- sqlspec/adapters/adbc/driver.py +266 -58
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +153 -0
- sqlspec/adapters/aiosqlite/_types.py +1 -1
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +527 -0
- sqlspec/adapters/aiosqlite/config.py +88 -15
- sqlspec/adapters/aiosqlite/data_dictionary.py +149 -0
- sqlspec/adapters/aiosqlite/driver.py +143 -40
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +7 -7
- sqlspec/adapters/asyncmy/__init__.py +7 -1
- sqlspec/adapters/asyncmy/_types.py +2 -2
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +493 -0
- sqlspec/adapters/asyncmy/config.py +68 -23
- sqlspec/adapters/asyncmy/data_dictionary.py +161 -0
- sqlspec/adapters/asyncmy/driver.py +313 -58
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +2 -1
- sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
- sqlspec/adapters/asyncpg/_types.py +11 -7
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +450 -0
- sqlspec/adapters/asyncpg/config.py +59 -35
- sqlspec/adapters/asyncpg/data_dictionary.py +173 -0
- sqlspec/adapters/asyncpg/driver.py +170 -25
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/_types.py +1 -1
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +576 -0
- sqlspec/adapters/bigquery/config.py +27 -10
- sqlspec/adapters/bigquery/data_dictionary.py +149 -0
- sqlspec/adapters/bigquery/driver.py +368 -142
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +125 -0
- sqlspec/adapters/duckdb/_types.py +1 -1
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +553 -0
- sqlspec/adapters/duckdb/config.py +80 -20
- sqlspec/adapters/duckdb/data_dictionary.py +163 -0
- sqlspec/adapters/duckdb/driver.py +167 -45
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +4 -4
- sqlspec/adapters/duckdb/type_converter.py +133 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +20 -2
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1745 -0
- sqlspec/adapters/oracledb/config.py +122 -32
- sqlspec/adapters/oracledb/data_dictionary.py +509 -0
- sqlspec/adapters/oracledb/driver.py +353 -91
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +767 -0
- sqlspec/adapters/oracledb/migrations.py +348 -73
- sqlspec/adapters/oracledb/type_converter.py +207 -0
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +2 -1
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +482 -0
- sqlspec/adapters/psqlpy/config.py +46 -17
- sqlspec/adapters/psqlpy/data_dictionary.py +172 -0
- sqlspec/adapters/psqlpy/driver.py +123 -209
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +102 -0
- sqlspec/adapters/psycopg/_type_handlers.py +80 -0
- sqlspec/adapters/psycopg/_types.py +2 -1
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +944 -0
- sqlspec/adapters/psycopg/config.py +69 -35
- sqlspec/adapters/psycopg/data_dictionary.py +331 -0
- sqlspec/adapters/psycopg/driver.py +238 -81
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/sqlite/__init__.py +2 -1
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +1 -1
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +572 -0
- sqlspec/adapters/sqlite/config.py +87 -15
- sqlspec/adapters/sqlite/data_dictionary.py +149 -0
- sqlspec/adapters/sqlite/driver.py +137 -54
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +18 -9
- sqlspec/base.py +45 -26
- sqlspec/builder/__init__.py +73 -4
- sqlspec/builder/_base.py +162 -89
- sqlspec/builder/_column.py +62 -29
- sqlspec/builder/_ddl.py +180 -121
- sqlspec/builder/_delete.py +5 -4
- sqlspec/builder/_dml.py +388 -0
- sqlspec/{_sql.py → builder/_factory.py} +53 -94
- sqlspec/builder/_insert.py +32 -131
- sqlspec/builder/_join.py +375 -0
- sqlspec/builder/_merge.py +446 -11
- sqlspec/builder/_parsing_utils.py +111 -17
- sqlspec/builder/_select.py +1457 -24
- sqlspec/builder/_update.py +11 -42
- sqlspec/cli.py +307 -194
- sqlspec/config.py +252 -67
- sqlspec/core/__init__.py +5 -4
- sqlspec/core/cache.py +17 -17
- sqlspec/core/compiler.py +62 -9
- sqlspec/core/filters.py +37 -37
- sqlspec/core/hashing.py +9 -9
- sqlspec/core/parameters.py +83 -48
- sqlspec/core/result.py +102 -46
- sqlspec/core/splitter.py +16 -17
- sqlspec/core/statement.py +36 -30
- sqlspec/core/type_conversion.py +235 -0
- sqlspec/driver/__init__.py +7 -6
- sqlspec/driver/_async.py +188 -151
- sqlspec/driver/_common.py +285 -80
- sqlspec/driver/_sync.py +188 -152
- sqlspec/driver/mixins/_result_tools.py +20 -236
- sqlspec/driver/mixins/_sql_translator.py +4 -4
- sqlspec/exceptions.py +75 -7
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/adapter.py +73 -53
- sqlspec/extensions/litestar/__init__.py +21 -4
- sqlspec/extensions/litestar/cli.py +54 -10
- sqlspec/extensions/litestar/config.py +59 -266
- sqlspec/extensions/litestar/handlers.py +46 -17
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +324 -223
- sqlspec/extensions/litestar/providers.py +25 -25
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/loader.py +30 -49
- sqlspec/migrations/__init__.py +4 -3
- sqlspec/migrations/base.py +302 -39
- sqlspec/migrations/commands.py +611 -144
- sqlspec/migrations/context.py +142 -0
- sqlspec/migrations/fix.py +199 -0
- sqlspec/migrations/loaders.py +68 -23
- sqlspec/migrations/runner.py +543 -107
- sqlspec/migrations/tracker.py +237 -21
- sqlspec/migrations/utils.py +51 -3
- sqlspec/migrations/validation.py +177 -0
- sqlspec/protocols.py +66 -36
- sqlspec/storage/_utils.py +98 -0
- sqlspec/storage/backends/fsspec.py +134 -106
- sqlspec/storage/backends/local.py +78 -51
- sqlspec/storage/backends/obstore.py +278 -162
- sqlspec/storage/registry.py +75 -39
- sqlspec/typing.py +16 -84
- sqlspec/utils/config_resolver.py +153 -0
- sqlspec/utils/correlation.py +4 -5
- sqlspec/utils/data_transformation.py +3 -2
- sqlspec/utils/deprecation.py +9 -8
- sqlspec/utils/fixtures.py +4 -4
- sqlspec/utils/logging.py +46 -6
- sqlspec/utils/module_loader.py +2 -2
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +50 -2
- sqlspec/utils/sync_tools.py +21 -17
- sqlspec/utils/text.py +1 -2
- sqlspec/utils/type_guards.py +111 -20
- sqlspec/utils/version.py +433 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/METADATA +40 -21
- sqlspec-0.27.0.dist-info/RECORD +207 -0
- sqlspec/builder/mixins/__init__.py +0 -55
- sqlspec/builder/mixins/_cte_and_set_ops.py +0 -254
- sqlspec/builder/mixins/_delete_operations.py +0 -50
- sqlspec/builder/mixins/_insert_operations.py +0 -282
- sqlspec/builder/mixins/_join_operations.py +0 -389
- sqlspec/builder/mixins/_merge_operations.py +0 -592
- sqlspec/builder/mixins/_order_limit_operations.py +0 -152
- sqlspec/builder/mixins/_pivot_operations.py +0 -157
- sqlspec/builder/mixins/_select_operations.py +0 -936
- sqlspec/builder/mixins/_update_operations.py +0 -218
- sqlspec/builder/mixins/_where_clause.py +0 -1304
- sqlspec-0.25.0.dist-info/RECORD +0 -139
- sqlspec-0.25.0.dist-info/licenses/NOTICE +0 -29
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/licenses/LICENSE +0 -0
|
"""Psqlpy session store for Litestar integration."""

from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING

from sqlspec.extensions.litestar.store import BaseSQLSpecStore
from sqlspec.utils.logging import get_logger

if TYPE_CHECKING:
    # Imported only for type checking to avoid a runtime import cycle.
    from sqlspec.adapters.psqlpy.config import PsqlpyConfig

# Module-level logger, namespaced under the adapter path.
logger = get_logger("adapters.psqlpy.litestar.store")

__all__ = ("PsqlpyStore",)
class PsqlpyStore(BaseSQLSpecStore["PsqlpyConfig"]):
    """PostgreSQL session store backed by the Psqlpy (Rust-based async) driver.

    Implements server-side session storage for Litestar on top of PostgreSQL:

    - Native async PostgreSQL operations via Rust
    - UPSERT support using ON CONFLICT
    - Automatic expiration handling
    - Efficient cleanup of expired sessions

    Args:
        config: PsqlpyConfig instance.

    Example:
        from sqlspec.adapters.psqlpy import PsqlpyConfig
        from sqlspec.adapters.psqlpy.litestar.store import PsqlpyStore

        config = PsqlpyConfig(pool_config={"dsn": "postgresql://..."})
        store = PsqlpyStore(config)
        await store.create_table()
    """

    __slots__ = ()

    def __init__(self, config: "PsqlpyConfig") -> None:
        """Initialize the Psqlpy session store.

        Args:
            config: PsqlpyConfig instance.

        Notes:
            The session table name is read from
            config.extension_config["litestar"]["session_table"].
        """
        super().__init__(config)

    def _get_create_table_sql(self) -> str:
        """Build the PostgreSQL CREATE TABLE script for the session table.

        Returns:
            SQL script creating the table, its partial index, and autovacuum tuning.

        Notes:
            - TIMESTAMPTZ keeps expiration timestamps timezone-aware
            - The partial index (WHERE expires_at IS NOT NULL) keeps the index small
            - FILLFACTOR 80 leaves room for HOT updates, reducing table bloat
            - created_at/updated_at audit columns aid debugging
            - The table name is internally controlled, not user input (S608 suppressed)
        """
        return f"""
        CREATE TABLE IF NOT EXISTS {self._table_name} (
            session_id TEXT PRIMARY KEY,
            data BYTEA NOT NULL,
            expires_at TIMESTAMPTZ,
            created_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
        ) WITH (fillfactor = 80);

        CREATE INDEX IF NOT EXISTS idx_{self._table_name}_expires_at
        ON {self._table_name}(expires_at) WHERE expires_at IS NOT NULL;

        ALTER TABLE {self._table_name} SET (
            autovacuum_vacuum_scale_factor = 0.05,
            autovacuum_analyze_scale_factor = 0.02
        );
        """

    def _get_drop_table_sql(self) -> "list[str]":
        """Build the PostgreSQL DROP statements for this store.

        Returns:
            List of SQL statements dropping the index and then the table.
        """
        return [f"DROP INDEX IF EXISTS idx_{self._table_name}_expires_at", f"DROP TABLE IF EXISTS {self._table_name}"]

    async def create_table(self) -> None:
        """Create the session table if it doesn't exist."""
        ddl = self._get_create_table_sql()
        async with self._config.provide_session() as driver:
            await driver.execute_script(ddl)
        logger.debug("Created session table: %s", self._table_name)

    async def get(self, key: str, renew_for: "int | timedelta | None" = None) -> "bytes | None":
        """Fetch a session value by key.

        Args:
            key: Session ID to retrieve.
            renew_for: If given, renew the expiry time for this duration.

        Returns:
            Session data as bytes if found and not expired, None otherwise.

        Notes:
            CURRENT_TIMESTAMP is used (rather than NOW()) for SQL standard
            compliance; the partial index covers the expires_at comparison.
        """
        select_sql = f"""
        SELECT data, expires_at FROM {self._table_name}
        WHERE session_id = $1
        AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
        """

        async with self._config.provide_connection() as conn:
            records = (await conn.fetch(select_sql, [key])).result()

            if not records:
                return None

            record = records[0]

            # Only sliding-renew sessions that actually carry an expiry.
            if renew_for is not None and record["expires_at"] is not None:
                renewed_at = self._calculate_expires_at(renew_for)
                if renewed_at is not None:
                    renew_sql = f"""
                    UPDATE {self._table_name}
                    SET expires_at = $1, updated_at = CURRENT_TIMESTAMP
                    WHERE session_id = $2
                    """
                    await conn.execute(renew_sql, [renewed_at, key])

            return bytes(record["data"])

    async def set(self, key: str, value: "str | bytes", expires_in: "int | timedelta | None" = None) -> None:
        """Insert or update a session value (UPSERT).

        Args:
            key: Session ID.
            value: Session data.
            expires_in: Time until expiration.

        Notes:
            EXCLUDED refers to the proposed insert values in ON CONFLICT;
            updated_at is refreshed on every write for the audit trail.
        """
        payload = self._value_to_bytes(value)
        expires_at = self._calculate_expires_at(expires_in)

        upsert_sql = f"""
        INSERT INTO {self._table_name} (session_id, data, expires_at)
        VALUES ($1, $2, $3)
        ON CONFLICT (session_id)
        DO UPDATE SET
            data = EXCLUDED.data,
            expires_at = EXCLUDED.expires_at,
            updated_at = CURRENT_TIMESTAMP
        """

        async with self._config.provide_connection() as conn:
            await conn.execute(upsert_sql, [key, payload, expires_at])

    async def delete(self, key: str) -> None:
        """Remove a session by key.

        Args:
            key: Session ID to delete.
        """
        delete_sql = f"DELETE FROM {self._table_name} WHERE session_id = $1"

        async with self._config.provide_connection() as conn:
            await conn.execute(delete_sql, [key])

    async def delete_all(self) -> None:
        """Remove every session from the store."""
        delete_sql = f"DELETE FROM {self._table_name}"

        async with self._config.provide_connection() as conn:
            await conn.execute(delete_sql)
        logger.debug("Deleted all sessions from table: %s", self._table_name)

    async def exists(self, key: str) -> bool:
        """Report whether a session key exists and has not expired.

        Args:
            key: Session ID to check.

        Returns:
            True if the session exists and is not expired.

        Notes:
            Uses fetch() rather than fetch_val() so the zero-row case is
            handled gracefully; CURRENT_TIMESTAMP matches get().
        """
        probe_sql = f"""
        SELECT 1 FROM {self._table_name}
        WHERE session_id = $1
        AND (expires_at IS NULL OR expires_at > CURRENT_TIMESTAMP)
        """

        async with self._config.provide_connection() as conn:
            records = (await conn.fetch(probe_sql, [key])).result()
            return bool(records)

    async def expires_in(self, key: str) -> "int | None":
        """Return the number of seconds until a session expires.

        Args:
            key: Session ID to check.

        Returns:
            Seconds until expiration, or None if no expiry or key doesn't exist.

        Notes:
            fetch() handles the case where the key doesn't exist.
        """
        select_sql = f"""
        SELECT expires_at FROM {self._table_name}
        WHERE session_id = $1
        """

        async with self._config.provide_connection() as conn:
            records = (await conn.fetch(select_sql, [key])).result()

        if not records:
            return None

        expires_at = records[0]["expires_at"]
        if expires_at is None:
            return None

        remaining = (expires_at - datetime.now(timezone.utc)).total_seconds()
        # Already-expired rows report 0 rather than a negative duration.
        return max(int(remaining), 0)

    async def delete_expired(self) -> int:
        """Purge all expired sessions.

        Returns:
            Number of sessions deleted.

        Notes:
            RETURNING provides the deleted-row count because psqlpy's
            QueryResult doesn't expose command tags. For very large tables
            (10M+ rows), consider batching deletes to limit lock duration.
        """
        purge_sql = f"""
        DELETE FROM {self._table_name}
        WHERE expires_at <= CURRENT_TIMESTAMP
        RETURNING session_id
        """

        async with self._config.provide_connection() as conn:
            deleted = (await conn.fetch(purge_sql, [])).result()
            count = len(deleted)
            if count:
                logger.debug("Cleaned up %d expired sessions", count)
            return count
"""PostgreSQL-specific type conversion for psqlpy adapter.

Provides specialized type handling for PostgreSQL databases, including
PostgreSQL-specific types like intervals and arrays while preserving
backward compatibility.
"""

import re
from functools import lru_cache
from typing import Any, Final

from sqlspec.core.type_conversion import BaseTypeConverter

# Matches PostgreSQL-specific literal forms: verbose/ISO-8601 intervals and
# brace-delimited array literals.
PG_SPECIFIC_REGEX: Final[re.Pattern[str]] = re.compile(
    r"^(?:"
    r"(?P<interval>(?:(?:\d+\s+(?:year|month|day|hour|minute|second)s?\s*)+)|(?:P(?:\d+Y)?(?:\d+M)?(?:\d+D)?(?:T(?:\d+H)?(?:\d+M)?(?:\d+(?:\.\d+)?S)?)?))|"
    r"(?P<pg_array>\{(?:[^{}]+|\{[^{}]*\})*\})"
    r")$",
    re.IGNORECASE,
)

# Fast pre-filter: strings containing none of these characters cannot match
# any convertible type, so detection is skipped entirely.
PG_SPECIAL_CHARS: Final[frozenset[str]] = frozenset({"{", "-", ":", "T", ".", "P", "[", "Y", "M", "D", "H", "S"})


class PostgreSQLTypeConverter(BaseTypeConverter):
    """PostgreSQL-specific type converter with interval and array support.

    Extends BaseTypeConverter with PostgreSQL-specific detection while keeping
    interval and array values as strings for backward compatibility. Carries a
    per-instance LRU cache so repeated values convert quickly.
    """

    __slots__ = ("_convert_cache",)

    def __init__(self, cache_size: int = 5000) -> None:
        """Initialize converter with per-instance conversion cache.

        Args:
            cache_size: Maximum number of string values to cache (default: 5000)
        """
        super().__init__()

        # Closure-based cache keeps lru_cache off the method itself, so the
        # cache lifetime is tied to this instance rather than the class.
        @lru_cache(maxsize=cache_size)
        def _convert(text: str) -> Any:
            if not text or PG_SPECIAL_CHARS.isdisjoint(text):
                return text
            detected = self.detect_type(text)
            return self.convert_value(text, detected) if detected else text

        self._convert_cache = _convert

    def convert_if_detected(self, value: Any) -> Any:
        """Convert string if special type detected (cached).

        Args:
            value: Value to potentially convert

        Returns:
            Converted value or original value
        """
        return self._convert_cache(value) if isinstance(value, str) else value

    def detect_type(self, value: str) -> str | None:
        """Detect types including PostgreSQL-specific types.

        Args:
            value: String value to analyze.

        Returns:
            Type name if detected, None otherwise.
        """
        # Base detection (dates, UUIDs, JSON, ...) takes precedence.
        base_type = super().detect_type(value)
        if base_type:
            return base_type

        match = PG_SPECIFIC_REGEX.match(value)
        if match is None:
            return None
        return next((name for name in ("interval", "pg_array") if match.group(name)), None)

    def convert_value(self, value: str, detected_type: str) -> Any:
        """Convert value with PostgreSQL-specific handling.

        Args:
            value: String value to convert.
            detected_type: Detected type name.

        Returns:
            Converted value, or the original string for PostgreSQL-specific types.
        """
        # Intervals and arrays are passed through untouched; the driver/server
        # interprets their literal form.
        if detected_type in {"interval", "pg_array"}:
            return value
        return super().convert_value(value, detected_type)


__all__ = ("PG_SPECIAL_CHARS", "PG_SPECIFIC_REGEX", "PostgreSQLTypeConverter")
"""Psycopg pgvector type handlers for vector data type support.

Provides automatic conversion between NumPy arrays and PostgreSQL vector types
via pgvector-python library. Supports both sync and async connections.
"""

import logging
from typing import TYPE_CHECKING, Any

# Feature flags resolved at import time: whether optional deps are available.
from sqlspec.typing import NUMPY_INSTALLED, PGVECTOR_INSTALLED

if TYPE_CHECKING:
    # Type-checking-only import keeps psycopg optional at runtime.
    from psycopg import AsyncConnection, Connection

__all__ = ("register_pgvector_async", "register_pgvector_sync")


logger = logging.getLogger(__name__)
def register_pgvector_sync(connection: "Connection[Any]") -> None:
    """Register pgvector type handlers on a psycopg sync connection.

    Enables automatic conversion between NumPy arrays and PostgreSQL vector
    types using the pgvector-python library. A best-effort operation: missing
    packages or a database without the vector extension are logged, not raised.

    Args:
        connection: Psycopg sync connection.
    """
    if not PGVECTOR_INSTALLED:
        logger.debug("pgvector not installed - skipping vector type handlers")
        return

    if not NUMPY_INSTALLED:
        # Registration proceeds; only the NumPy fast path is unavailable.
        logger.debug("NumPy not installed - registering pgvector without NumPy support")

    try:
        from pgvector.psycopg import register_vector

        register_vector(connection)
        logger.debug("Registered pgvector type handlers on psycopg sync connection")
    except ValueError as error:
        # pgvector raises ValueError when the extension is absent from the DB.
        if "vector type not found" in str(error).lower():
            logger.debug("Skipping pgvector registration - extension not enabled in database")
        else:
            logger.warning("Unexpected error during pgvector registration: %s", error)
    except Exception:
        logger.exception("Failed to register pgvector for psycopg sync")
|
|
51
|
+
|
|
52
|
+
async def register_pgvector_async(connection: "AsyncConnection[Any]") -> None:
|
|
53
|
+
"""Register pgvector type handlers on psycopg async connection.
|
|
54
|
+
|
|
55
|
+
Enables automatic conversion between NumPy arrays and PostgreSQL vector types
|
|
56
|
+
using the pgvector-python library.
|
|
57
|
+
|
|
58
|
+
Args:
|
|
59
|
+
connection: Psycopg async connection.
|
|
60
|
+
"""
|
|
61
|
+
if not PGVECTOR_INSTALLED:
|
|
62
|
+
logger.debug("pgvector not installed - skipping vector type handlers")
|
|
63
|
+
return
|
|
64
|
+
|
|
65
|
+
if not NUMPY_INSTALLED:
|
|
66
|
+
logger.debug("NumPy not installed - registering pgvector without NumPy support")
|
|
67
|
+
|
|
68
|
+
try:
|
|
69
|
+
from pgvector.psycopg import register_vector_async
|
|
70
|
+
|
|
71
|
+
await register_vector_async(connection)
|
|
72
|
+
logger.debug("Registered pgvector type handlers on psycopg async connection")
|
|
73
|
+
except ValueError as error:
|
|
74
|
+
message = str(error).lower()
|
|
75
|
+
if "vector type not found" in message:
|
|
76
|
+
logger.debug("Skipping pgvector registration - extension not enabled in database")
|
|
77
|
+
return
|
|
78
|
+
logger.warning("Unexpected error during pgvector registration: %s", error)
|
|
79
|
+
except Exception:
|
|
80
|
+
logger.exception("Failed to register pgvector for psycopg async")
|
|
@@ -3,8 +3,9 @@ from typing import TYPE_CHECKING
|
|
|
3
3
|
from psycopg.rows import DictRow as PsycopgDictRow
|
|
4
4
|
|
|
5
5
|
if TYPE_CHECKING:
|
|
6
|
+
from typing import TypeAlias
|
|
7
|
+
|
|
6
8
|
from psycopg import AsyncConnection, Connection
|
|
7
|
-
from typing_extensions import TypeAlias
|
|
8
9
|
|
|
9
10
|
PsycopgSyncConnection: TypeAlias = Connection[PsycopgDictRow]
|
|
10
11
|
PsycopgAsyncConnection: TypeAlias = AsyncConnection[PsycopgDictRow]
|