sqlspec 0.26.0__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/__init__.py +7 -15
- sqlspec/_serialization.py +55 -25
- sqlspec/_typing.py +155 -52
- sqlspec/adapters/adbc/_types.py +1 -1
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +880 -0
- sqlspec/adapters/adbc/config.py +62 -12
- sqlspec/adapters/adbc/data_dictionary.py +74 -2
- sqlspec/adapters/adbc/driver.py +226 -58
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +44 -50
- sqlspec/adapters/aiosqlite/_types.py +1 -1
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +536 -0
- sqlspec/adapters/aiosqlite/config.py +86 -16
- sqlspec/adapters/aiosqlite/data_dictionary.py +34 -2
- sqlspec/adapters/aiosqlite/driver.py +127 -38
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +7 -7
- sqlspec/adapters/asyncmy/__init__.py +7 -1
- sqlspec/adapters/asyncmy/_types.py +1 -1
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +503 -0
- sqlspec/adapters/asyncmy/config.py +59 -17
- sqlspec/adapters/asyncmy/data_dictionary.py +41 -2
- sqlspec/adapters/asyncmy/driver.py +293 -62
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +2 -1
- sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
- sqlspec/adapters/asyncpg/_types.py +11 -7
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +460 -0
- sqlspec/adapters/asyncpg/config.py +57 -36
- sqlspec/adapters/asyncpg/data_dictionary.py +48 -2
- sqlspec/adapters/asyncpg/driver.py +153 -23
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/_types.py +1 -1
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +585 -0
- sqlspec/adapters/bigquery/config.py +36 -11
- sqlspec/adapters/bigquery/data_dictionary.py +42 -2
- sqlspec/adapters/bigquery/driver.py +489 -144
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +55 -23
- sqlspec/adapters/duckdb/_types.py +2 -2
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +563 -0
- sqlspec/adapters/duckdb/config.py +79 -21
- sqlspec/adapters/duckdb/data_dictionary.py +41 -2
- sqlspec/adapters/duckdb/driver.py +225 -44
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +5 -5
- sqlspec/adapters/duckdb/type_converter.py +51 -21
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +20 -2
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1628 -0
- sqlspec/adapters/oracledb/config.py +120 -36
- sqlspec/adapters/oracledb/data_dictionary.py +87 -20
- sqlspec/adapters/oracledb/driver.py +475 -86
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +765 -0
- sqlspec/adapters/oracledb/migrations.py +316 -25
- sqlspec/adapters/oracledb/type_converter.py +91 -16
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +2 -1
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +483 -0
- sqlspec/adapters/psqlpy/config.py +45 -19
- sqlspec/adapters/psqlpy/data_dictionary.py +48 -2
- sqlspec/adapters/psqlpy/driver.py +108 -41
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +40 -11
- sqlspec/adapters/psycopg/_type_handlers.py +80 -0
- sqlspec/adapters/psycopg/_types.py +2 -1
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +962 -0
- sqlspec/adapters/psycopg/config.py +65 -37
- sqlspec/adapters/psycopg/data_dictionary.py +91 -3
- sqlspec/adapters/psycopg/driver.py +200 -78
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/sqlite/__init__.py +2 -1
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +1 -1
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +582 -0
- sqlspec/adapters/sqlite/config.py +85 -16
- sqlspec/adapters/sqlite/data_dictionary.py +34 -2
- sqlspec/adapters/sqlite/driver.py +120 -52
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +5 -5
- sqlspec/base.py +45 -26
- sqlspec/builder/__init__.py +73 -4
- sqlspec/builder/_base.py +91 -58
- sqlspec/builder/_column.py +5 -5
- sqlspec/builder/_ddl.py +98 -89
- sqlspec/builder/_delete.py +5 -4
- sqlspec/builder/_dml.py +388 -0
- sqlspec/{_sql.py → builder/_factory.py} +41 -44
- sqlspec/builder/_insert.py +5 -82
- sqlspec/builder/{mixins/_join_operations.py → _join.py} +145 -143
- sqlspec/builder/_merge.py +446 -11
- sqlspec/builder/_parsing_utils.py +9 -11
- sqlspec/builder/_select.py +1313 -25
- sqlspec/builder/_update.py +11 -42
- sqlspec/cli.py +76 -69
- sqlspec/config.py +331 -62
- sqlspec/core/__init__.py +5 -4
- sqlspec/core/cache.py +18 -18
- sqlspec/core/compiler.py +6 -8
- sqlspec/core/filters.py +55 -47
- sqlspec/core/hashing.py +9 -9
- sqlspec/core/parameters.py +76 -45
- sqlspec/core/result.py +234 -47
- sqlspec/core/splitter.py +16 -17
- sqlspec/core/statement.py +32 -31
- sqlspec/core/type_conversion.py +3 -2
- sqlspec/driver/__init__.py +1 -3
- sqlspec/driver/_async.py +183 -160
- sqlspec/driver/_common.py +197 -109
- sqlspec/driver/_sync.py +189 -161
- sqlspec/driver/mixins/_result_tools.py +20 -236
- sqlspec/driver/mixins/_sql_translator.py +4 -4
- sqlspec/exceptions.py +70 -7
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/adapter.py +69 -61
- sqlspec/extensions/fastapi/__init__.py +21 -0
- sqlspec/extensions/fastapi/extension.py +331 -0
- sqlspec/extensions/fastapi/providers.py +543 -0
- sqlspec/extensions/flask/__init__.py +36 -0
- sqlspec/extensions/flask/_state.py +71 -0
- sqlspec/extensions/flask/_utils.py +40 -0
- sqlspec/extensions/flask/extension.py +389 -0
- sqlspec/extensions/litestar/__init__.py +21 -4
- sqlspec/extensions/litestar/cli.py +54 -10
- sqlspec/extensions/litestar/config.py +56 -266
- sqlspec/extensions/litestar/handlers.py +46 -17
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +349 -224
- sqlspec/extensions/litestar/providers.py +25 -25
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/extensions/starlette/__init__.py +10 -0
- sqlspec/extensions/starlette/_state.py +25 -0
- sqlspec/extensions/starlette/_utils.py +52 -0
- sqlspec/extensions/starlette/extension.py +254 -0
- sqlspec/extensions/starlette/middleware.py +154 -0
- sqlspec/loader.py +30 -49
- sqlspec/migrations/base.py +200 -76
- sqlspec/migrations/commands.py +591 -62
- sqlspec/migrations/context.py +6 -9
- sqlspec/migrations/fix.py +199 -0
- sqlspec/migrations/loaders.py +47 -19
- sqlspec/migrations/runner.py +241 -75
- sqlspec/migrations/tracker.py +237 -21
- sqlspec/migrations/utils.py +51 -3
- sqlspec/migrations/validation.py +177 -0
- sqlspec/protocols.py +106 -36
- sqlspec/storage/_utils.py +85 -0
- sqlspec/storage/backends/fsspec.py +133 -107
- sqlspec/storage/backends/local.py +78 -51
- sqlspec/storage/backends/obstore.py +276 -168
- sqlspec/storage/registry.py +75 -39
- sqlspec/typing.py +30 -84
- sqlspec/utils/__init__.py +25 -4
- sqlspec/utils/arrow_helpers.py +81 -0
- sqlspec/utils/config_resolver.py +6 -6
- sqlspec/utils/correlation.py +4 -5
- sqlspec/utils/data_transformation.py +3 -2
- sqlspec/utils/deprecation.py +9 -8
- sqlspec/utils/fixtures.py +4 -4
- sqlspec/utils/logging.py +46 -6
- sqlspec/utils/module_loader.py +205 -5
- sqlspec/utils/portal.py +311 -0
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +113 -4
- sqlspec/utils/sync_tools.py +36 -22
- sqlspec/utils/text.py +1 -2
- sqlspec/utils/type_guards.py +136 -20
- sqlspec/utils/version.py +433 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/METADATA +41 -22
- sqlspec-0.28.0.dist-info/RECORD +221 -0
- sqlspec/builder/mixins/__init__.py +0 -55
- sqlspec/builder/mixins/_cte_and_set_ops.py +0 -253
- sqlspec/builder/mixins/_delete_operations.py +0 -50
- sqlspec/builder/mixins/_insert_operations.py +0 -282
- sqlspec/builder/mixins/_merge_operations.py +0 -698
- sqlspec/builder/mixins/_order_limit_operations.py +0 -145
- sqlspec/builder/mixins/_pivot_operations.py +0 -157
- sqlspec/builder/mixins/_select_operations.py +0 -930
- sqlspec/builder/mixins/_update_operations.py +0 -199
- sqlspec/builder/mixins/_where_clause.py +0 -1298
- sqlspec-0.26.0.dist-info/RECORD +0 -157
- sqlspec-0.26.0.dist-info/licenses/NOTICE +0 -29
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/licenses/LICENSE +0 -0
sqlspec/adapters/oracledb/migrations.py

@@ -5,12 +5,14 @@ to handle Oracle's unique SQL syntax requirements.
 """
 
 import getpass
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any
 
-from
-
+from rich.console import Console
+
+from sqlspec.builder import CreateTable, Select, sql
 from sqlspec.migrations.base import BaseMigrationTracker
 from sqlspec.utils.logging import get_logger
+from sqlspec.utils.version import parse_version
 
 if TYPE_CHECKING:
     from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase
@@ -18,10 +20,20 @@ if TYPE_CHECKING:
 __all__ = ("OracleAsyncMigrationTracker", "OracleSyncMigrationTracker")
 
 logger = get_logger("migrations.oracle")
+console = Console()
 
 
 class OracleMigrationTrackerMixin:
-    """Mixin providing Oracle-specific migration table creation.
+    """Mixin providing Oracle-specific migration table creation and querying.
+
+    Oracle has unique identifier handling rules:
+    - Unquoted identifiers are case-insensitive and stored as UPPERCASE
+    - Quoted identifiers are case-sensitive and stored exactly as written
+
+    This mixin overrides SQL builder methods to add quoted identifiers for
+    all column references, ensuring they match the lowercase column names
+    created by the migration table.
+    """
 
     __slots__ = ()
 
@@ -41,6 +53,8 @@ class OracleMigrationTrackerMixin:
         return (
            sql.create_table(self.version_table)
            .column("version_num", "VARCHAR2(32)", primary_key=True)
+            .column("version_type", "VARCHAR2(16)")
+            .column("execution_sequence", "INTEGER")
            .column("description", "VARCHAR2(2000)")
            .column("applied_at", "TIMESTAMP", default="CURRENT_TIMESTAMP")
            .column("execution_time_ms", "INTEGER")
@@ -48,16 +62,152 @@ class OracleMigrationTrackerMixin:
            .column("applied_by", "VARCHAR2(255)")
        )
 
+    def _get_current_version_sql(self) -> Select:
+        """Get Oracle-specific SQL for retrieving current version.
+
+        Uses uppercase column names with lowercase aliases to match Python expectations.
+        Oracle stores unquoted identifiers as UPPERCASE, so we query UPPERCASE columns
+        and alias them as quoted "lowercase" for result consistency.
+
+        Returns:
+            SQL builder object for version query.
+        """
+        return (
+            sql.select('VERSION_NUM AS "version_num"')
+            .from_(self.version_table)
+            .order_by("EXECUTION_SEQUENCE DESC")
+            .limit(1)
+        )
+
+    def _get_applied_migrations_sql(self) -> Select:
+        """Get Oracle-specific SQL for retrieving all applied migrations.
+
+        Uses uppercase column names with lowercase aliases to match Python expectations.
+        Oracle stores unquoted identifiers as UPPERCASE, so we query UPPERCASE columns
+        and alias them as quoted "lowercase" for result consistency.
+
+        Returns:
+            SQL builder object for migrations query.
+        """
+        return (
+            sql.select(
+                'VERSION_NUM AS "version_num"',
+                'VERSION_TYPE AS "version_type"',
+                'EXECUTION_SEQUENCE AS "execution_sequence"',
+                'DESCRIPTION AS "description"',
+                'APPLIED_AT AS "applied_at"',
+                'EXECUTION_TIME_MS AS "execution_time_ms"',
+                'CHECKSUM AS "checksum"',
+                'APPLIED_BY AS "applied_by"',
+            )
+            .from_(self.version_table)
+            .order_by("EXECUTION_SEQUENCE")
+        )
+
+    def _get_next_execution_sequence_sql(self) -> Select:
+        """Get Oracle-specific SQL for retrieving next execution sequence.
+
+        Uses uppercase column names with lowercase alias to match Python expectations.
+        Oracle stores unquoted identifiers as UPPERCASE, so we query UPPERCASE columns
+        and alias them as quoted "lowercase" for result consistency.
+
+        Returns:
+            SQL builder object for sequence query.
+        """
+        return sql.select('COALESCE(MAX(EXECUTION_SEQUENCE), 0) + 1 AS "next_seq"').from_(self.version_table)
+
+    def _get_existing_columns_sql(self) -> str:
+        """Get SQL to query existing columns in the tracking table.
+
+        Returns:
+            Raw SQL string for Oracle's USER_TAB_COLUMNS query.
+        """
+        return f"""
+            SELECT column_name
+            FROM user_tab_columns
+            WHERE table_name = '{self.version_table.upper()}'
+        """
+
+    def _detect_missing_columns(self, existing_columns: "set[str]") -> "set[str]":
+        """Detect which columns are missing from the current schema.
+
+        Args:
+            existing_columns: Set of existing column names (uppercase).
+
+        Returns:
+            Set of missing column names (lowercase).
+        """
+        target_create = self._get_create_table_sql()
+        target_columns = {col.name.lower() for col in target_create.columns}
+        existing_lower = {col.lower() for col in existing_columns}
+        return target_columns - existing_lower
+
 
 class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTracker["SyncDriverAdapterBase"]):
     """Oracle-specific sync migration tracker."""
 
     __slots__ = ()
 
+    def _migrate_schema_if_needed(self, driver: "SyncDriverAdapterBase") -> None:
+        """Check for and add any missing columns to the tracking table.
+
+        Uses the driver's data dictionary to query existing columns from Oracle's
+        USER_TAB_COLUMNS metadata table.
+
+        Args:
+            driver: The database driver to use.
+        """
+        try:
+            columns_data = driver.data_dictionary.get_columns(driver, self.version_table)
+            existing_columns = {str(row["column_name"]).upper() for row in columns_data}
+            missing_columns = self._detect_missing_columns(existing_columns)
+
+            if not missing_columns:
+                logger.debug("Migration tracking table schema is up-to-date")
+                return
+
+            console.print(
+                f"[cyan]Migrating tracking table schema, adding columns: {', '.join(sorted(missing_columns))}[/]"
+            )
+
+            for col_name in sorted(missing_columns):
+                self._add_column(driver, col_name)
+
+            driver.commit()
+            console.print("[green]Migration tracking table schema updated successfully[/]")
+
+        except Exception as e:
+            logger.warning("Could not check or migrate tracking table schema: %s", e)
+
+    def _add_column(self, driver: "SyncDriverAdapterBase", column_name: str) -> None:
+        """Add a single column to the tracking table.
+
+        Args:
+            driver: The database driver to use.
+            column_name: Name of the column to add (lowercase).
+        """
+        target_create = self._get_create_table_sql()
+        column_def = next((col for col in target_create.columns if col.name.lower() == column_name), None)
+
+        if not column_def:
+            return
+
+        default_clause = f" DEFAULT {column_def.default}" if column_def.default else ""
+        not_null_clause = " NOT NULL" if column_def.not_null else ""
+
+        alter_sql = f"""
+            ALTER TABLE {self.version_table}
+            ADD {column_def.name} {column_def.dtype}{default_clause}{not_null_clause}
+        """
+
+        driver.execute(alter_sql)
+        logger.debug("Added column %s to tracking table", column_name)
+
     def ensure_tracking_table(self, driver: "SyncDriverAdapterBase") -> None:
         """Create the migration tracking table if it doesn't exist.
 
         Uses a PL/SQL block to make the operation atomic and prevent race conditions.
+        Also checks for and adds missing columns to support schema migrations.
 
         Args:
             driver: The database driver to use.
@@ -67,6 +217,8 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
            EXECUTE IMMEDIATE '
                CREATE TABLE {self.version_table} (
                    version_num VARCHAR2(32) PRIMARY KEY,
+                    version_type VARCHAR2(16),
+                    execution_sequence INTEGER,
                    description VARCHAR2(2000),
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    execution_time_ms INTEGER,
@@ -85,7 +237,9 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
         driver.execute_script(create_script)
         driver.commit()
 
-
+        self._migrate_schema_if_needed(driver)
+
+    def get_current_version(self, driver: "SyncDriverAdapterBase") -> "str | None":
         """Get the latest applied migration version.
 
         Args:
@@ -95,7 +249,8 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
            The current migration version or None if no migrations applied.
        """
        result = driver.execute(self._get_current_version_sql())
-
+        data = result.get_data()
+        return data[0]["version_num"] if data else None
 
     def get_applied_migrations(self, driver: "SyncDriverAdapterBase") -> "list[dict[str, Any]]":
         """Get all applied migrations in order.
@@ -104,15 +259,10 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
            driver: The database driver to use.
 
        Returns:
-            List of migration records as dictionaries.
+            List of migration records as dictionaries with lowercase keys.
        """
        result = driver.execute(self._get_applied_migrations_sql())
-
-            return []
-
-        normalized_data = [{key.lower(): value for key, value in row.items()} for row in result.data]
-
-        return cast("list[dict[str, Any]]", normalized_data)
+        return result.get_data()
 
     def record_migration(
         self, driver: "SyncDriverAdapterBase", version: str, description: str, execution_time_ms: int, checksum: str
@@ -126,10 +276,17 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
            execution_time_ms: Execution time in milliseconds.
            checksum: MD5 checksum of the migration content.
        """
-
        applied_by = getpass.getuser()
+        parsed_version = parse_version(version)
+        version_type = parsed_version.type.value
+
+        next_seq_result = driver.execute(self._get_next_execution_sequence_sql())
+        seq_data = next_seq_result.get_data()
+        execution_sequence = seq_data[0]["next_seq"] if seq_data else 1
 
-        record_sql = self._get_record_migration_sql(
+        record_sql = self._get_record_migration_sql(
+            version, version_type, execution_sequence, description, execution_time_ms, checksum, applied_by
+        )
        driver.execute(record_sql)
        driver.commit()
 
@@ -144,16 +301,107 @@ class OracleSyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrack
         driver.execute(remove_sql)
         driver.commit()
 
+    def update_version_record(self, driver: "SyncDriverAdapterBase", old_version: str, new_version: str) -> None:
+        """Update migration version record from timestamp to sequential.
+
+        Updates version_num and version_type while preserving execution_sequence,
+        applied_at, and other tracking metadata. Used during fix command.
+
+        Idempotent: If the version is already updated, logs and continues without error.
+        This allows fix command to be safely re-run after pulling changes.
+
+        Args:
+            driver: The database driver to use.
+            old_version: Current timestamp version string.
+            new_version: New sequential version string.
+
+        Raises:
+            ValueError: If neither old_version nor new_version found in database.
+        """
+        parsed_new_version = parse_version(new_version)
+        new_version_type = parsed_new_version.type.value
+
+        result = driver.execute(self._get_update_version_sql(old_version, new_version, new_version_type))
+
+        if result.rows_affected == 0:
+            check_result = driver.execute(self._get_applied_migrations_sql())
+            applied_versions = {row["version_num"] for row in check_result.data} if check_result.data else set()
+
+            if new_version in applied_versions:
+                logger.debug("Version already updated: %s -> %s", old_version, new_version)
+                return
+
+            msg = f"Migration {old_version} not found in database for update to {new_version}"
+            raise ValueError(msg)
+
+        driver.commit()
+
 
 class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTracker["AsyncDriverAdapterBase"]):
     """Oracle-specific async migration tracker."""
 
     __slots__ = ()
 
+    async def _migrate_schema_if_needed(self, driver: "AsyncDriverAdapterBase") -> None:
+        """Check for and add any missing columns to the tracking table.
+
+        Uses the driver's data dictionary to query existing columns from Oracle's
+        USER_TAB_COLUMNS metadata table.
+
+        Args:
+            driver: The database driver to use.
+        """
+        try:
+            columns_data = await driver.data_dictionary.get_columns(driver, self.version_table)
+            existing_columns = {str(row["column_name"]).upper() for row in columns_data}
+            missing_columns = self._detect_missing_columns(existing_columns)
+
+            if not missing_columns:
+                logger.debug("Migration tracking table schema is up-to-date")
+                return
+
+            console.print(
+                f"[cyan]Migrating tracking table schema, adding columns: {', '.join(sorted(missing_columns))}[/]"
+            )
+
+            for col_name in sorted(missing_columns):
+                await self._add_column(driver, col_name)
+
+            await driver.commit()
+            console.print("[green]Migration tracking table schema updated successfully[/]")
+
+        except Exception as e:
+            logger.warning("Could not check or migrate tracking table schema: %s", e)
+
+    async def _add_column(self, driver: "AsyncDriverAdapterBase", column_name: str) -> None:
+        """Add a single column to the tracking table.
+
+        Args:
+            driver: The database driver to use.
+            column_name: Name of the column to add (lowercase).
+        """
+        target_create = self._get_create_table_sql()
+        column_def = next((col for col in target_create.columns if col.name.lower() == column_name), None)
+
+        if not column_def:
+            return
+
+        default_clause = f" DEFAULT {column_def.default}" if column_def.default else ""
+        not_null_clause = " NOT NULL" if column_def.not_null else ""
+
+        alter_sql = f"""
+            ALTER TABLE {self.version_table}
+            ADD {column_def.name} {column_def.dtype}{default_clause}{not_null_clause}
+        """
+
+        await driver.execute(alter_sql)
+        logger.debug("Added column %s to tracking table", column_name)
+
     async def ensure_tracking_table(self, driver: "AsyncDriverAdapterBase") -> None:
         """Create the migration tracking table if it doesn't exist.
 
         Uses a PL/SQL block to make the operation atomic and prevent race conditions.
+        Also checks for and adds missing columns to support schema migrations.
 
         Args:
             driver: The database driver to use.
@@ -163,6 +411,8 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
            EXECUTE IMMEDIATE '
                CREATE TABLE {self.version_table} (
                    version_num VARCHAR2(32) PRIMARY KEY,
+                    version_type VARCHAR2(16),
+                    execution_sequence INTEGER,
                    description VARCHAR2(2000),
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    execution_time_ms INTEGER,
@@ -181,7 +431,9 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
         await driver.execute_script(create_script)
         await driver.commit()
 
-
+        await self._migrate_schema_if_needed(driver)
+
+    async def get_current_version(self, driver: "AsyncDriverAdapterBase") -> "str | None":
         """Get the latest applied migration version.
 
         Args:
@@ -191,7 +443,8 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
            The current migration version or None if no migrations applied.
        """
        result = await driver.execute(self._get_current_version_sql())
-
+        data = result.get_data()
+        return data[0]["version_num"] if data else None
 
     async def get_applied_migrations(self, driver: "AsyncDriverAdapterBase") -> "list[dict[str, Any]]":
         """Get all applied migrations in order.
@@ -200,15 +453,10 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
            driver: The database driver to use.
 
        Returns:
-            List of migration records as dictionaries.
+            List of migration records as dictionaries with lowercase keys.
        """
        result = await driver.execute(self._get_applied_migrations_sql())
-
-            return []
-
-        normalized_data = [{key.lower(): value for key, value in row.items()} for row in result.data]
-
-        return cast("list[dict[str, Any]]", normalized_data)
+        return result.get_data()
 
     async def record_migration(
         self, driver: "AsyncDriverAdapterBase", version: str, description: str, execution_time_ms: int, checksum: str
@@ -224,8 +472,16 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
        """
 
        applied_by = getpass.getuser()
+        parsed_version = parse_version(version)
+        version_type = parsed_version.type.value
+
+        next_seq_result = await driver.execute(self._get_next_execution_sequence_sql())
+        seq_data = next_seq_result.get_data()
+        execution_sequence = seq_data[0]["next_seq"] if seq_data else 1
 
-        record_sql = self._get_record_migration_sql(
+        record_sql = self._get_record_migration_sql(
+            version, version_type, execution_sequence, description, execution_time_ms, checksum, applied_by
+        )
        await driver.execute(record_sql)
        await driver.commit()
 
@@ -239,3 +495,38 @@ class OracleAsyncMigrationTracker(OracleMigrationTrackerMixin, BaseMigrationTrac
         remove_sql = self._get_remove_migration_sql(version)
         await driver.execute(remove_sql)
         await driver.commit()
+
+    async def update_version_record(self, driver: "AsyncDriverAdapterBase", old_version: str, new_version: str) -> None:
+        """Update migration version record from timestamp to sequential.
+
+        Updates version_num and version_type while preserving execution_sequence,
+        applied_at, and other tracking metadata. Used during fix command.
+
+        Idempotent: If the version is already updated, logs and continues without error.
+        This allows fix command to be safely re-run after pulling changes.
+
+        Args:
+            driver: The database driver to use.
+            old_version: Current timestamp version string.
+            new_version: New sequential version string.
+
+        Raises:
+            ValueError: If neither old_version nor new_version found in database.
+        """
+        parsed_new_version = parse_version(new_version)
+        new_version_type = parsed_new_version.type.value
+
+        result = await driver.execute(self._get_update_version_sql(old_version, new_version, new_version_type))
+
+        if result.rows_affected == 0:
+            check_result = await driver.execute(self._get_applied_migrations_sql())
+            applied_versions = {row["version_num"] for row in check_result.data} if check_result.data else set()
+
+            if new_version in applied_versions:
+                logger.debug("Version already updated: %s -> %s", old_version, new_version)
+                return
+
+            msg = f"Migration {old_version} not found in database for update to {new_version}"
+            raise ValueError(msg)
+
+        await driver.commit()
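The aliasing in the new tracker queries is worth spelling out: Oracle folds unquoted identifiers to UPPERCASE, so a plain SELECT on the tracking table would return rows keyed as VERSION_NUM, while the quoted alias built in _get_current_version_sql() keeps the lowercase key the Python code reads. A minimal illustration with plain dictionaries (the table contents and values below are made up; this is not sqlspec code):

# Row keys as Oracle would return them for an unaliased query:
unaliased_row = {"VERSION_NUM": "0001", "EXECUTION_SEQUENCE": 1}

# Row keys produced by the aliased query, e.g.
# SELECT VERSION_NUM AS "version_num" ...: the quoted alias preserves case.
aliased_row = {"version_num": "0001", "execution_sequence": 1}

# get_current_version() can then read the value directly, as the diff does with
# data[0]["version_num"]; the removed per-row key.lower() normalization pass is
# no longer needed.
assert aliased_row["version_num"] == "0001"
assert "version_num" not in unaliased_row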
sqlspec/adapters/oracledb/type_converter.py

@@ -4,14 +4,16 @@ Provides specialized type handling for Oracle databases, including
 efficient LOB (Large Object) processing and JSON storage detection.
 """
 
+import array
 import re
 from datetime import datetime
+from functools import lru_cache
 from typing import Any, Final
 
 from sqlspec.core.type_conversion import BaseTypeConverter
+from sqlspec.typing import NUMPY_INSTALLED
 from sqlspec.utils.sync_tools import ensure_async_
 
-# Oracle-specific JSON storage detection
 ORACLE_JSON_STORAGE_REGEX: Final[re.Pattern[str]] = re.compile(
     r"^(?:"
     r"(?P<json_type>JSON)|"
@@ -22,15 +24,50 @@ ORACLE_JSON_STORAGE_REGEX: Final[re.Pattern[str]] = re.compile(
     re.IGNORECASE,
 )
 
+ORACLE_SPECIAL_CHARS: Final[frozenset[str]] = frozenset({"{", "[", "-", ":", "T", "."})
+
 
 class OracleTypeConverter(BaseTypeConverter):
     """Oracle-specific type conversion with LOB optimization.
 
     Extends the base TypeDetector with Oracle-specific functionality
     including streaming LOB support and JSON storage type detection.
+    Includes per-instance LRU cache for improved performance.
     """
 
-    __slots__ = ()
+    __slots__ = ("_convert_cache",)
+
+    def __init__(self, cache_size: int = 5000) -> None:
+        """Initialize converter with per-instance conversion cache.
+
+        Args:
+            cache_size: Maximum number of string values to cache (default: 5000)
+        """
+        super().__init__()
+
+        @lru_cache(maxsize=cache_size)
+        def _cached_convert(value: str) -> Any:
+            if not value or not any(c in value for c in ORACLE_SPECIAL_CHARS):
+                return value
+            detected_type = self.detect_type(value)
+            if detected_type:
+                return self.convert_value(value, detected_type)
+            return value
+
+        self._convert_cache = _cached_convert
+
+    def convert_if_detected(self, value: Any) -> Any:
+        """Convert string if special type detected (cached).
+
+        Args:
+            value: Value to potentially convert
+
+        Returns:
+            Converted value or original value
+        """
+        if not isinstance(value, str):
+            return value
+        return self._convert_cache(value)
 
     async def process_lob(self, value: Any) -> Any:
         """Process Oracle LOB objects efficiently.
@@ -44,7 +81,6 @@ class OracleTypeConverter(BaseTypeConverter):
         if not hasattr(value, "read"):
             return value
 
-        # Use ensure_async_ for unified sync/async handling
         read_func = ensure_async_(value.read)
         return await read_func()
 
@@ -106,27 +142,66 @@ class OracleTypeConverter(BaseTypeConverter):
        Returns:
            Converted value appropriate for the column type.
        """
-        # Handle LOB objects
        if hasattr(value, "read"):
            if self.detect_json_storage_type(column_info):
-                # For JSON storage types, decode the LOB content
                content = self.handle_large_lob(value)
                content_str = content.decode("utf-8") if isinstance(content, bytes) else content
-
-                detected_type = self.detect_type(content_str)
-                if detected_type == "json":
-                    return self.convert_value(content_str, detected_type)
-                return content_str
-            # For other LOB types, return raw content
+                return self.convert_if_detected(content_str)
            return self.handle_large_lob(value)
 
-        # Use base type detection for non-LOB values
        if isinstance(value, str):
-
-
-
+            return self.convert_if_detected(value)
+
+        return value
+
+    def convert_vector_to_numpy(self, value: Any) -> Any:
+        """Convert Oracle VECTOR to NumPy array.
+
+        Provides manual conversion API for users who need explicit control
+        over vector transformations or have disabled automatic handlers.
+
+        Args:
+            value: Oracle VECTOR value (array.array) or other value.
+
+        Returns:
+            NumPy ndarray if value is array.array and NumPy is installed,
+            otherwise original value.
+        """
+        if not NUMPY_INSTALLED:
+            return value
+
+        if isinstance(value, array.array):
+            from sqlspec.adapters.oracledb._numpy_handlers import numpy_converter_out
+
+            return numpy_converter_out(value)
+
+        return value
+
+    def convert_numpy_to_vector(self, value: Any) -> Any:
+        """Convert NumPy array to Oracle VECTOR format.
+
+        Provides manual conversion API for users who need explicit control
+        over vector transformations or have disabled automatic handlers.
+
+        Args:
+            value: NumPy ndarray or other value.
+
+        Returns:
+            array.array compatible with Oracle VECTOR if value is ndarray,
+            otherwise original value.
+
+        """
+        if not NUMPY_INSTALLED:
+            return value
+
+        import numpy as np
+
+        if isinstance(value, np.ndarray):
+            from sqlspec.adapters.oracledb._numpy_handlers import numpy_converter_in
+
+            return numpy_converter_in(value)
 
        return value
 
 
-__all__ = ("ORACLE_JSON_STORAGE_REGEX", "OracleTypeConverter")
+__all__ = ("ORACLE_JSON_STORAGE_REGEX", "ORACLE_SPECIAL_CHARS", "OracleTypeConverter")
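The caching change in OracleTypeConverter.__init__ uses a pattern worth noting: an lru_cache-wrapped closure is built per instance and stored on a slot, so every converter owns its own bounded cache that is garbage-collected with the instance rather than sharing a module-level cache keyed on self. A stripped-down sketch of the same pattern (the conversion logic here is a stand-in, not sqlspec's):

from functools import lru_cache


class CachedConverter:
    """Minimal per-instance LRU cache demo; not the sqlspec class."""

    __slots__ = ("_convert_cache",)

    def __init__(self, cache_size: int = 5000) -> None:
        @lru_cache(maxsize=cache_size)
        def _cached_convert(value: str) -> str:
            # Stand-in for detect_type()/convert_value(); the real converter
            # only does work when Oracle "special" characters are present.
            return value.strip()

        # Each instance gets its own cache, dropped when the instance goes away.
        self._convert_cache = _cached_convert

    def convert_if_detected(self, value: object) -> object:
        if not isinstance(value, str):
            return value
        return self._convert_cache(value)


converter = CachedConverter(cache_size=100)
assert converter.convert_if_detected("  2024-01-01  ") == "2024-01-01"
assert converter.convert_if_detected(42) == 42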
sqlspec/adapters/psqlpy/_type_handlers.py (new file)

@@ -0,0 +1,44 @@
+"""Psqlpy pgvector type handlers for vector data type support.
+
+Provides automatic conversion between NumPy arrays and PostgreSQL vector types
+via pgvector-python library when integrated with psqlpy connection pool.
+
+Note:
+    Full pgvector support for psqlpy is planned for a future release.
+    The driver_features infrastructure (enable_pgvector) has been implemented
+    to enable this feature when the underlying psqlpy library adds support for
+    custom type handlers on pool initialization.
+"""
+
+import logging
+from typing import TYPE_CHECKING
+
+from sqlspec.typing import PGVECTOR_INSTALLED
+
+if TYPE_CHECKING:
+    from psqlpy import Connection
+
+__all__ = ("register_pgvector",)
+
+
+logger = logging.getLogger(__name__)
+
+
+def register_pgvector(connection: "Connection") -> None:
+    """Register pgvector type handlers on psqlpy connection.
+
+    Currently a placeholder for future implementation. The psqlpy library
+    does not yet expose a type handler registration API compatible with
+    pgvector's automatic conversion system.
+
+    Args:
+        connection: Psqlpy connection instance.
+
+    Note:
+        When psqlpy adds type handler support, this function will:
+        - Register pgvector extension on the connection
+        - Enable automatic NumPy array <-> PostgreSQL vector conversion
+        - Support vector similarity search operations
+    """
+    if not PGVECTOR_INSTALLED:
+        return