sqlspec 0.32.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/__init__.py +104 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +312 -0
- sqlspec/_typing.py +784 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_types.py +12 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +880 -0
- sqlspec/adapters/adbc/config.py +436 -0
- sqlspec/adapters/adbc/data_dictionary.py +537 -0
- sqlspec/adapters/adbc/driver.py +841 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +153 -0
- sqlspec/adapters/aiosqlite/__init__.py +29 -0
- sqlspec/adapters/aiosqlite/_types.py +13 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +536 -0
- sqlspec/adapters/aiosqlite/config.py +310 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +260 -0
- sqlspec/adapters/aiosqlite/driver.py +463 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +500 -0
- sqlspec/adapters/asyncmy/__init__.py +25 -0
- sqlspec/adapters/asyncmy/_types.py +12 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +503 -0
- sqlspec/adapters/asyncmy/config.py +246 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +241 -0
- sqlspec/adapters/asyncmy/driver.py +632 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +23 -0
- sqlspec/adapters/asyncpg/_type_handlers.py +76 -0
- sqlspec/adapters/asyncpg/_types.py +23 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +460 -0
- sqlspec/adapters/asyncpg/config.py +464 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +321 -0
- sqlspec/adapters/asyncpg/driver.py +720 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/__init__.py +18 -0
- sqlspec/adapters/bigquery/_types.py +12 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +585 -0
- sqlspec/adapters/bigquery/config.py +298 -0
- sqlspec/adapters/bigquery/data_dictionary.py +256 -0
- sqlspec/adapters/bigquery/driver.py +1073 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +125 -0
- sqlspec/adapters/duckdb/__init__.py +24 -0
- sqlspec/adapters/duckdb/_types.py +12 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +563 -0
- sqlspec/adapters/duckdb/config.py +396 -0
- sqlspec/adapters/duckdb/data_dictionary.py +264 -0
- sqlspec/adapters/duckdb/driver.py +604 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +273 -0
- sqlspec/adapters/duckdb/type_converter.py +133 -0
- sqlspec/adapters/oracledb/__init__.py +32 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +39 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +130 -0
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1632 -0
- sqlspec/adapters/oracledb/config.py +469 -0
- sqlspec/adapters/oracledb/data_dictionary.py +717 -0
- sqlspec/adapters/oracledb/driver.py +1493 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +765 -0
- sqlspec/adapters/oracledb/migrations.py +532 -0
- sqlspec/adapters/oracledb/type_converter.py +207 -0
- sqlspec/adapters/psqlpy/__init__.py +16 -0
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +12 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +483 -0
- sqlspec/adapters/psqlpy/config.py +271 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +179 -0
- sqlspec/adapters/psqlpy/driver.py +892 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +102 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_type_handlers.py +90 -0
- sqlspec/adapters/psycopg/_types.py +18 -0
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +962 -0
- sqlspec/adapters/psycopg/config.py +487 -0
- sqlspec/adapters/psycopg/data_dictionary.py +630 -0
- sqlspec/adapters/psycopg/driver.py +1336 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/spanner/__init__.py +38 -0
- sqlspec/adapters/spanner/_type_handlers.py +186 -0
- sqlspec/adapters/spanner/_types.py +12 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +435 -0
- sqlspec/adapters/spanner/config.py +241 -0
- sqlspec/adapters/spanner/data_dictionary.py +95 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +52 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +123 -0
- sqlspec/adapters/spanner/driver.py +366 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +266 -0
- sqlspec/adapters/spanner/type_converter.py +46 -0
- sqlspec/adapters/sqlite/__init__.py +18 -0
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +11 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +582 -0
- sqlspec/adapters/sqlite/config.py +221 -0
- sqlspec/adapters/sqlite/data_dictionary.py +256 -0
- sqlspec/adapters/sqlite/driver.py +527 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +140 -0
- sqlspec/base.py +811 -0
- sqlspec/builder/__init__.py +146 -0
- sqlspec/builder/_base.py +900 -0
- sqlspec/builder/_column.py +517 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.py +84 -0
- sqlspec/builder/_dml.py +381 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.py +1537 -0
- sqlspec/builder/_insert.py +315 -0
- sqlspec/builder/_join.py +375 -0
- sqlspec/builder/_merge.py +848 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.py +1615 -0
- sqlspec/builder/_update.py +161 -0
- sqlspec/builder/_vector_expressions.py +259 -0
- sqlspec/cli.py +764 -0
- sqlspec/config.py +1540 -0
- sqlspec/core/__init__.py +305 -0
- sqlspec/core/cache.py +785 -0
- sqlspec/core/compiler.py +603 -0
- sqlspec/core/filters.py +872 -0
- sqlspec/core/hashing.py +274 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +64 -0
- sqlspec/core/parameters/_alignment.py +266 -0
- sqlspec/core/parameters/_converter.py +413 -0
- sqlspec/core/parameters/_processor.py +341 -0
- sqlspec/core/parameters/_registry.py +201 -0
- sqlspec/core/parameters/_transformers.py +226 -0
- sqlspec/core/parameters/_types.py +430 -0
- sqlspec/core/parameters/_validator.py +123 -0
- sqlspec/core/pipeline.py +187 -0
- sqlspec/core/result.py +1124 -0
- sqlspec/core/splitter.py +940 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.py +835 -0
- sqlspec/core/type_conversion.py +235 -0
- sqlspec/driver/__init__.py +36 -0
- sqlspec/driver/_async.py +1027 -0
- sqlspec/driver/_common.py +1236 -0
- sqlspec/driver/_sync.py +1025 -0
- sqlspec/driver/mixins/__init__.py +7 -0
- sqlspec/driver/mixins/_result_tools.py +61 -0
- sqlspec/driver/mixins/_sql_translator.py +122 -0
- sqlspec/driver/mixins/_storage.py +311 -0
- sqlspec/exceptions.py +321 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/__init__.py +10 -0
- sqlspec/extensions/aiosql/adapter.py +471 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +341 -0
- sqlspec/extensions/fastapi/providers.py +543 -0
- sqlspec/extensions/flask/__init__.py +36 -0
- sqlspec/extensions/flask/_state.py +72 -0
- sqlspec/extensions/flask/_utils.py +40 -0
- sqlspec/extensions/flask/extension.py +402 -0
- sqlspec/extensions/litestar/__init__.py +23 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/cli.py +92 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +638 -0
- sqlspec/extensions/litestar/providers.py +454 -0
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +107 -0
- sqlspec/extensions/starlette/__init__.py +10 -0
- sqlspec/extensions/starlette/_state.py +26 -0
- sqlspec/extensions/starlette/_utils.py +52 -0
- sqlspec/extensions/starlette/extension.py +257 -0
- sqlspec/extensions/starlette/middleware.py +154 -0
- sqlspec/loader.py +716 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +728 -0
- sqlspec/migrations/commands.py +1140 -0
- sqlspec/migrations/context.py +142 -0
- sqlspec/migrations/fix.py +203 -0
- sqlspec/migrations/loaders.py +450 -0
- sqlspec/migrations/runner.py +1024 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +403 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +203 -0
- sqlspec/observability/__init__.py +22 -0
- sqlspec/observability/_config.py +228 -0
- sqlspec/observability/_diagnostics.py +67 -0
- sqlspec/observability/_dispatcher.py +151 -0
- sqlspec/observability/_observer.py +180 -0
- sqlspec/observability/_runtime.py +381 -0
- sqlspec/observability/_spans.py +158 -0
- sqlspec/protocols.py +530 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +46 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +163 -0
- sqlspec/storage/backends/fsspec.py +398 -0
- sqlspec/storage/backends/local.py +377 -0
- sqlspec/storage/backends/obstore.py +580 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +604 -0
- sqlspec/storage/registry.py +289 -0
- sqlspec/typing.py +219 -0
- sqlspec/utils/__init__.py +31 -0
- sqlspec/utils/arrow_helpers.py +95 -0
- sqlspec/utils/config_resolver.py +153 -0
- sqlspec/utils/correlation.py +132 -0
- sqlspec/utils/data_transformation.py +114 -0
- sqlspec/utils/dependencies.py +79 -0
- sqlspec/utils/deprecation.py +113 -0
- sqlspec/utils/fixtures.py +250 -0
- sqlspec/utils/logging.py +172 -0
- sqlspec/utils/module_loader.py +273 -0
- sqlspec/utils/portal.py +325 -0
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +396 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.py +277 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.py +99 -0
- sqlspec/utils/type_guards.py +1324 -0
- sqlspec/utils/version.py +444 -0
- sqlspec-0.32.0.dist-info/METADATA +202 -0
- sqlspec-0.32.0.dist-info/RECORD +262 -0
- sqlspec-0.32.0.dist-info/WHEEL +4 -0
- sqlspec-0.32.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.32.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,537 @@
|
|
|
1
|
+
"""ADBC multi-dialect data dictionary for metadata queries."""
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from typing import TYPE_CHECKING, Any, cast
|
|
5
|
+
|
|
6
|
+
from sqlspec.driver import ForeignKeyMetadata, SyncDataDictionaryBase, SyncDriverAdapterBase, VersionInfo
|
|
7
|
+
from sqlspec.utils.logging import get_logger
|
|
8
|
+
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from collections.abc import Callable
|
|
11
|
+
|
|
12
|
+
from sqlspec.adapters.adbc.driver import AdbcDriver
|
|
13
|
+
|
|
14
|
+
# Module-level logger for dialect-detection and version-probe diagnostics.
logger = get_logger("adapters.adbc.data_dictionary")

# Version-string extraction patterns, one per supported dialect.
# PostgreSQL reports e.g. "PostgreSQL 15.2 on x86_64..."; the patch group is optional.
POSTGRES_VERSION_PATTERN = re.compile(r"PostgreSQL (\d+)\.(\d+)(?:\.(\d+))?")
# sqlite_version() returns a bare "X.Y.Z" string.
SQLITE_VERSION_PATTERN = re.compile(r"(\d+)\.(\d+)\.(\d+)")
# DuckDB's version() may carry a leading "v" (e.g. "v0.9.2").
DUCKDB_VERSION_PATTERN = re.compile(r"v?(\d+)\.(\d+)\.(\d+)")
# MySQL VERSION() starts with "X.Y.Z" possibly followed by a suffix.
MYSQL_VERSION_PATTERN = re.compile(r"(\d+)\.(\d+)\.(\d+)")

__all__ = ("AdbcDataDictionary",)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class AdbcDataDictionary(SyncDataDictionaryBase):
    """ADBC multi-dialect data dictionary.

    Delegates to appropriate dialect-specific logic based on the driver's dialect.
    """

    def get_foreign_keys(
        self, driver: "SyncDriverAdapterBase", table: "str | None" = None, schema: "str | None" = None
    ) -> "list[ForeignKeyMetadata]":
        """Get foreign key metadata based on detected dialect.

        Args:
            driver: ADBC driver instance
            table: Table name to filter by (None for all tables, where supported)
            schema: Schema name to filter by (None for default)

        Returns:
            List of foreign key metadata entries. Empty for unsupported
            dialects, for SQLite without a table filter, or on query failure.
        """
        dialect = self._get_dialect(driver)
        adbc_driver = cast("AdbcDriver", driver)

        if dialect == "sqlite":
            if table:
                # PRAGMA statements cannot take bound parameters; double any
                # embedded single quotes so the identifier literal stays safe.
                safe_table = table.replace("'", "''")
                result = adbc_driver.execute(f"PRAGMA foreign_key_list('{safe_table}')")
                # SQLite PRAGMA columns: id, seq, table, from, to, on_update, on_delete, match.
                # We need 'from' (local column), 'table' (referenced table) and 'to'
                # (referenced column). ADBC drivers may key rows by name or by index.
                return [
                    ForeignKeyMetadata(
                        table_name=table,
                        column_name=row["from"] if isinstance(row, dict) else row[3],
                        referenced_table=row["table"] if isinstance(row, dict) else row[2],
                        referenced_column=row["to"] if isinstance(row, dict) else row[4],
                    )
                    for row in result.data or []
                ]
            # Listing foreign keys for *all* tables in SQLite would require one
            # PRAGMA per table, which is expensive through ADBC; only
            # single-table lookups are supported here.
            return []

        # SQL-standard databases (Postgres, MySQL, DuckDB, BigQuery) expose
        # information_schema.key_column_usage in broadly compatible shapes.
        params: list[str] = []

        if dialect == "bigquery":
            dataset = schema
            if not dataset:
                return []  # BigQuery requires a dataset to address INFORMATION_SCHEMA
            kcu = f"`{dataset}.INFORMATION_SCHEMA.KEY_COLUMN_USAGE`"
            rc = f"`{dataset}.INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS`"
            sql = f"""
            SELECT
                kcu.table_name,
                kcu.column_name,
                pk_kcu.table_name AS referenced_table_name,
                pk_kcu.column_name AS referenced_column_name,
                kcu.constraint_name,
                kcu.table_schema,
                pk_kcu.table_schema AS referenced_table_schema
            FROM {kcu} kcu
            JOIN {rc} rc ON kcu.constraint_name = rc.constraint_name
            JOIN {kcu} pk_kcu
                ON rc.unique_constraint_name = pk_kcu.constraint_name
                AND kcu.ordinal_position = pk_kcu.ordinal_position
            """
            if table:
                # The filter is inlined into the SQL text, so double any
                # embedded single quotes to avoid breaking out of the literal.
                safe_table = table.replace("'", "''")
                sql += f" WHERE kcu.table_name = '{safe_table}'"

            try:
                result = adbc_driver.execute(sql)
                return [
                    ForeignKeyMetadata(
                        table_name=row["table_name"],
                        column_name=row["column_name"],
                        referenced_table=row["referenced_table_name"],
                        referenced_column=row["referenced_column_name"],
                        constraint_name=row["constraint_name"],
                        schema=row["table_schema"],
                        referenced_schema=row["referenced_table_schema"],
                    )
                    for row in result.data or []
                ]
            except Exception:
                # Best-effort metadata probe: treat any backend failure as
                # "no foreign keys discoverable".
                return []

        if dialect == "postgres":
            sql = """
            SELECT
                kcu.table_name,
                kcu.column_name,
                ccu.table_name AS referenced_table_name,
                ccu.column_name AS referenced_column_name,
                tc.constraint_name,
                tc.table_schema,
                ccu.table_schema AS referenced_table_schema
            FROM
                information_schema.table_constraints AS tc
                JOIN information_schema.key_column_usage AS kcu
                    ON tc.constraint_name = kcu.constraint_name
                    AND tc.table_schema = kcu.table_schema
                JOIN information_schema.constraint_column_usage AS ccu
                    ON ccu.constraint_name = tc.constraint_name
                    AND ccu.table_schema = tc.table_schema
            WHERE tc.constraint_type = 'FOREIGN KEY'
            """
            if schema:
                sql += " AND tc.table_schema = ?"
                params.append(schema)
            if table:
                sql += " AND tc.table_name = ?"
                params.append(table)

        elif dialect == "mysql":
            # MySQL's key_column_usage already carries the referenced side;
            # non-FK rows have NULL referenced_table_name.
            sql = """
            SELECT
                table_name,
                column_name,
                referenced_table_name,
                referenced_column_name,
                constraint_name,
                table_schema,
                referenced_table_schema
            FROM information_schema.key_column_usage
            WHERE referenced_table_name IS NOT NULL
            """
            if schema:
                sql += " AND table_schema = ?"
                params.append(schema)
            if table:
                sql += " AND table_name = ?"
                params.append(table)

        elif dialect == "duckdb":
            # DuckDB is Postgres-like but resolves the referenced side via
            # referential_constraints joined back to key_column_usage.
            sql = """
            SELECT
                kcu.table_name,
                kcu.column_name,
                pk_kcu.table_name AS referenced_table_name,
                pk_kcu.column_name AS referenced_column_name,
                kcu.constraint_name,
                kcu.table_schema,
                pk_kcu.table_schema AS referenced_table_schema
            FROM information_schema.key_column_usage kcu
            JOIN information_schema.referential_constraints rc
                ON kcu.constraint_name = rc.constraint_name
            JOIN information_schema.key_column_usage pk_kcu
                ON rc.unique_constraint_name = pk_kcu.constraint_name
                AND kcu.ordinal_position = pk_kcu.ordinal_position
            WHERE 1=1
            """
            if schema:
                sql += " AND kcu.table_schema = ?"
                params.append(schema)
            if table:
                sql += " AND kcu.table_name = ?"
                params.append(table)
        else:
            return []

        try:
            result = adbc_driver.execute(sql, tuple(params))
            return [
                ForeignKeyMetadata(
                    table_name=row["table_name"],
                    column_name=row["column_name"],
                    referenced_table=row["referenced_table_name"],
                    referenced_column=row["referenced_column_name"],
                    constraint_name=row["constraint_name"],
                    schema=row.get("table_schema"),
                    referenced_schema=row.get("referenced_table_schema"),
                )
                for row in result.data or []
            ]
        except Exception:
            # Best-effort metadata probe: swallow backend errors and report none.
            return []

    def _get_dialect(self, driver: SyncDriverAdapterBase) -> str:
        """Get dialect from ADBC driver.

        Args:
            driver: ADBC driver instance

        Returns:
            Dialect name
        """
        return str(cast("AdbcDriver", driver).dialect)

    def get_version(self, driver: SyncDriverAdapterBase) -> "VersionInfo | None":
        """Get database version information based on detected dialect.

        Args:
            driver: ADBC driver instance

        Returns:
            Database version information or None if detection fails
        """
        dialect = self._get_dialect(driver)
        adbc_driver = cast("AdbcDriver", driver)

        try:
            if dialect == "postgres":
                version_str = adbc_driver.select_value("SELECT version()")
                if version_str:
                    match = POSTGRES_VERSION_PATTERN.search(str(version_str))
                    if match:
                        major = int(match.group(1))
                        minor = int(match.group(2))
                        # The patch component is optional in PostgreSQL banners.
                        patch = int(match.group(3)) if match.group(3) else 0
                        return VersionInfo(major, minor, patch)

            elif dialect == "sqlite":
                version_str = adbc_driver.select_value("SELECT sqlite_version()")
                if version_str:
                    match = SQLITE_VERSION_PATTERN.match(str(version_str))
                    if match:
                        major, minor, patch = map(int, match.groups())
                        return VersionInfo(major, minor, patch)

            elif dialect == "duckdb":
                version_str = adbc_driver.select_value("SELECT version()")
                if version_str:
                    match = DUCKDB_VERSION_PATTERN.search(str(version_str))
                    if match:
                        major, minor, patch = map(int, match.groups())
                        return VersionInfo(major, minor, patch)

            elif dialect == "mysql":
                version_str = adbc_driver.select_value("SELECT VERSION()")
                if version_str:
                    match = MYSQL_VERSION_PATTERN.search(str(version_str))
                    if match:
                        major, minor, patch = map(int, match.groups())
                        return VersionInfo(major, minor, patch)

            elif dialect == "bigquery":
                # BigQuery has no queryable engine version; report a fixed one.
                return VersionInfo(1, 0, 0)

        except Exception:
            logger.warning("Failed to get %s version", dialect)

        return None

    def get_feature_flag(self, driver: SyncDriverAdapterBase, feature: str) -> bool:
        """Check if database supports a specific feature based on detected dialect.

        Args:
            driver: ADBC driver instance
            feature: Feature name to check

        Returns:
            True if feature is supported, False otherwise
        """
        dialect = self._get_dialect(driver)
        version_info = self.get_version(driver)

        # Each check receives the (possibly None) VersionInfo; version-gated
        # lambdas evaluate falsy when the version could not be detected.
        if dialect == "postgres":
            feature_checks: dict[str, Callable[..., bool]] = {
                "supports_json": lambda v: v and v >= VersionInfo(9, 2, 0),
                "supports_jsonb": lambda v: v and v >= VersionInfo(9, 4, 0),
                "supports_uuid": lambda _: True,
                "supports_arrays": lambda _: True,
                "supports_returning": lambda v: v and v >= VersionInfo(8, 2, 0),
                "supports_upsert": lambda v: v and v >= VersionInfo(9, 5, 0),
                "supports_window_functions": lambda v: v and v >= VersionInfo(8, 4, 0),
                "supports_cte": lambda v: v and v >= VersionInfo(8, 4, 0),
                "supports_transactions": lambda _: True,
                "supports_prepared_statements": lambda _: True,
                "supports_schemas": lambda _: True,
            }
        elif dialect == "sqlite":
            feature_checks = {
                "supports_json": lambda v: v and v >= VersionInfo(3, 38, 0),
                "supports_returning": lambda v: v and v >= VersionInfo(3, 35, 0),
                "supports_upsert": lambda v: v and v >= VersionInfo(3, 24, 0),
                "supports_window_functions": lambda v: v and v >= VersionInfo(3, 25, 0),
                "supports_cte": lambda v: v and v >= VersionInfo(3, 8, 3),
                "supports_transactions": lambda _: True,
                "supports_prepared_statements": lambda _: True,
                "supports_schemas": lambda _: False,
                "supports_arrays": lambda _: False,
                "supports_uuid": lambda _: False,
            }
        elif dialect == "duckdb":
            feature_checks = {
                "supports_json": lambda _: True,
                "supports_arrays": lambda _: True,
                "supports_uuid": lambda _: True,
                "supports_returning": lambda v: v and v >= VersionInfo(0, 8, 0),
                "supports_upsert": lambda v: v and v >= VersionInfo(0, 8, 0),
                "supports_window_functions": lambda _: True,
                "supports_cte": lambda _: True,
                "supports_transactions": lambda _: True,
                "supports_prepared_statements": lambda _: True,
                "supports_schemas": lambda _: True,
            }
        elif dialect == "mysql":
            feature_checks = {
                "supports_json": lambda v: v and v >= VersionInfo(5, 7, 8),
                "supports_cte": lambda v: v and v >= VersionInfo(8, 0, 1),
                "supports_returning": lambda _: False,
                "supports_upsert": lambda _: True,
                "supports_window_functions": lambda v: v and v >= VersionInfo(8, 0, 2),
                "supports_transactions": lambda _: True,
                "supports_prepared_statements": lambda _: True,
                "supports_schemas": lambda _: True,
                "supports_uuid": lambda _: False,
                "supports_arrays": lambda _: False,
            }
        elif dialect == "bigquery":
            feature_checks = {
                "supports_json": lambda _: True,
                "supports_arrays": lambda _: True,
                "supports_structs": lambda _: True,
                "supports_returning": lambda _: False,
                "supports_upsert": lambda _: True,
                "supports_window_functions": lambda _: True,
                "supports_cte": lambda _: True,
                "supports_transactions": lambda _: False,
                "supports_prepared_statements": lambda _: True,
                "supports_schemas": lambda _: True,
                "supports_uuid": lambda _: False,
            }
        else:
            # Unknown dialect: assume only the broadly universal features.
            feature_checks = {
                "supports_transactions": lambda _: True,
                "supports_prepared_statements": lambda _: True,
                "supports_window_functions": lambda _: True,
                "supports_cte": lambda _: True,
            }

        if feature in feature_checks:
            return bool(feature_checks[feature](version_info))

        return False

    def get_optimal_type(self, driver: SyncDriverAdapterBase, type_category: str) -> str:
        """Get optimal database type for a category based on detected dialect.

        Args:
            driver: ADBC driver instance
            type_category: Type category

        Returns:
            Database-specific type name
        """
        dialect = self._get_dialect(driver)
        version_info = self.get_version(driver)

        if dialect == "postgres":
            if type_category == "json":
                # Prefer JSONB (9.4+), then JSON (9.2+), else fall back to TEXT.
                if version_info and version_info >= VersionInfo(9, 4, 0):
                    return "JSONB"
                if version_info and version_info >= VersionInfo(9, 2, 0):
                    return "JSON"
                return "TEXT"
            type_map = {
                "uuid": "UUID",
                "boolean": "BOOLEAN",
                "timestamp": "TIMESTAMP WITH TIME ZONE",
                "text": "TEXT",
                "blob": "BYTEA",
                "array": "ARRAY",
            }

        elif dialect == "sqlite":
            if type_category == "json":
                # Native JSON support landed in SQLite 3.38.
                if version_info and version_info >= VersionInfo(3, 38, 0):
                    return "JSON"
                return "TEXT"
            type_map = {"uuid": "TEXT", "boolean": "INTEGER", "timestamp": "TIMESTAMP", "text": "TEXT", "blob": "BLOB"}

        elif dialect == "duckdb":
            type_map = {
                "json": "JSON",
                "uuid": "UUID",
                "boolean": "BOOLEAN",
                "timestamp": "TIMESTAMP",
                "text": "TEXT",
                "blob": "BLOB",
                "array": "LIST",
            }

        elif dialect == "mysql":
            if type_category == "json":
                # MySQL gained a native JSON type in 5.7.8.
                if version_info and version_info >= VersionInfo(5, 7, 8):
                    return "JSON"
                return "TEXT"
            type_map = {
                "uuid": "VARCHAR(36)",
                "boolean": "TINYINT(1)",
                "timestamp": "TIMESTAMP",
                "text": "TEXT",
                "blob": "BLOB",
            }

        elif dialect == "bigquery":
            type_map = {
                "json": "JSON",
                "uuid": "STRING",
                "boolean": "BOOL",
                "timestamp": "TIMESTAMP",
                "text": "STRING",
                "blob": "BYTES",
                "array": "ARRAY",
            }
        else:
            # Conservative ANSI-ish defaults for unrecognized dialects.
            type_map = {
                "json": "TEXT",
                "uuid": "VARCHAR(36)",
                "boolean": "INTEGER",
                "timestamp": "TIMESTAMP",
                "text": "TEXT",
                "blob": "BLOB",
            }

        return type_map.get(type_category, "TEXT")

    def get_columns(
        self, driver: SyncDriverAdapterBase, table: str, schema: "str | None" = None
    ) -> "list[dict[str, Any]]":
        """Get column information for a table based on detected dialect.

        Args:
            driver: ADBC driver instance
            table: Table name to query columns for
            schema: Schema name (None for default)

        Returns:
            List of column metadata dictionaries with keys:
            - column_name: Name of the column
            - data_type: Database data type
            - is_nullable or nullable: Whether column allows NULL
            - column_default or default_value: Default value if any
        """
        dialect = self._get_dialect(driver)
        adbc_driver = cast("AdbcDriver", driver)

        if dialect == "sqlite":
            # PRAGMA cannot take bound parameters; quote the identifier and
            # double embedded single quotes to keep the literal well-formed.
            safe_table = table.replace("'", "''")
            result = adbc_driver.execute(f"PRAGMA table_info('{safe_table}')")
            # PRAGMA table_info columns: cid, name, type, notnull, dflt_value, pk.
            return [
                {
                    "column_name": row["name"] if isinstance(row, dict) else row[1],
                    "data_type": row["type"] if isinstance(row, dict) else row[2],
                    "nullable": not (row["notnull"] if isinstance(row, dict) else row[3]),
                    "default_value": row["dflt_value"] if isinstance(row, dict) else row[4],
                }
                for row in result.data or []
            ]

        if dialect == "postgres":
            schema_name = schema or "public"
            # Query pg_catalog directly for precise type rendering
            # (format_type) and default expressions.
            sql = """
            SELECT
                a.attname::text AS column_name,
                pg_catalog.format_type(a.atttypid, a.atttypmod) AS data_type,
                CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END AS is_nullable,
                pg_catalog.pg_get_expr(d.adbin, d.adrelid)::text AS column_default
            FROM pg_catalog.pg_attribute a
            JOIN pg_catalog.pg_class c ON a.attrelid = c.oid
            JOIN pg_catalog.pg_namespace n ON c.relnamespace = n.oid
            LEFT JOIN pg_catalog.pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
            WHERE c.relname = ?
                AND n.nspname = ?
                AND a.attnum > 0
                AND NOT a.attisdropped
            ORDER BY a.attnum
            """
            result = adbc_driver.execute(sql, (table, schema_name))
            return result.data or []

        # Generic information_schema path for the remaining dialects.
        if schema:
            sql = """
            SELECT column_name, data_type, is_nullable, column_default
            FROM information_schema.columns
            WHERE table_name = ? AND table_schema = ?
            ORDER BY ordinal_position
            """
            result = adbc_driver.execute(sql, (table, schema))
        else:
            sql = """
            SELECT column_name, data_type, is_nullable, column_default
            FROM information_schema.columns
            WHERE table_name = ?
            ORDER BY ordinal_position
            """
            result = adbc_driver.execute(sql, (table,))

        return result.data or []

    def list_available_features(self) -> "list[str]":
        """List available feature flags across all supported dialects.

        Returns:
            List of supported feature names
        """
        return [
            "supports_json",
            "supports_jsonb",
            "supports_uuid",
            "supports_arrays",
            "supports_structs",
            "supports_returning",
            "supports_upsert",
            "supports_window_functions",
            "supports_cte",
            "supports_transactions",
            "supports_prepared_statements",
            "supports_schemas",
        ]