sqlspec 0.32.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/__init__.py +104 -0
- sqlspec/__main__.py +12 -0
- sqlspec/__metadata__.py +14 -0
- sqlspec/_serialization.py +312 -0
- sqlspec/_typing.py +784 -0
- sqlspec/adapters/__init__.py +0 -0
- sqlspec/adapters/adbc/__init__.py +5 -0
- sqlspec/adapters/adbc/_types.py +12 -0
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +880 -0
- sqlspec/adapters/adbc/config.py +436 -0
- sqlspec/adapters/adbc/data_dictionary.py +537 -0
- sqlspec/adapters/adbc/driver.py +841 -0
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +153 -0
- sqlspec/adapters/aiosqlite/__init__.py +29 -0
- sqlspec/adapters/aiosqlite/_types.py +13 -0
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +536 -0
- sqlspec/adapters/aiosqlite/config.py +310 -0
- sqlspec/adapters/aiosqlite/data_dictionary.py +260 -0
- sqlspec/adapters/aiosqlite/driver.py +463 -0
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +500 -0
- sqlspec/adapters/asyncmy/__init__.py +25 -0
- sqlspec/adapters/asyncmy/_types.py +12 -0
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +503 -0
- sqlspec/adapters/asyncmy/config.py +246 -0
- sqlspec/adapters/asyncmy/data_dictionary.py +241 -0
- sqlspec/adapters/asyncmy/driver.py +632 -0
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +23 -0
- sqlspec/adapters/asyncpg/_type_handlers.py +76 -0
- sqlspec/adapters/asyncpg/_types.py +23 -0
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +460 -0
- sqlspec/adapters/asyncpg/config.py +464 -0
- sqlspec/adapters/asyncpg/data_dictionary.py +321 -0
- sqlspec/adapters/asyncpg/driver.py +720 -0
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/__init__.py +18 -0
- sqlspec/adapters/bigquery/_types.py +12 -0
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +585 -0
- sqlspec/adapters/bigquery/config.py +298 -0
- sqlspec/adapters/bigquery/data_dictionary.py +256 -0
- sqlspec/adapters/bigquery/driver.py +1073 -0
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +125 -0
- sqlspec/adapters/duckdb/__init__.py +24 -0
- sqlspec/adapters/duckdb/_types.py +12 -0
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +563 -0
- sqlspec/adapters/duckdb/config.py +396 -0
- sqlspec/adapters/duckdb/data_dictionary.py +264 -0
- sqlspec/adapters/duckdb/driver.py +604 -0
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +273 -0
- sqlspec/adapters/duckdb/type_converter.py +133 -0
- sqlspec/adapters/oracledb/__init__.py +32 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +39 -0
- sqlspec/adapters/oracledb/_uuid_handlers.py +130 -0
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1632 -0
- sqlspec/adapters/oracledb/config.py +469 -0
- sqlspec/adapters/oracledb/data_dictionary.py +717 -0
- sqlspec/adapters/oracledb/driver.py +1493 -0
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +765 -0
- sqlspec/adapters/oracledb/migrations.py +532 -0
- sqlspec/adapters/oracledb/type_converter.py +207 -0
- sqlspec/adapters/psqlpy/__init__.py +16 -0
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +12 -0
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +483 -0
- sqlspec/adapters/psqlpy/config.py +271 -0
- sqlspec/adapters/psqlpy/data_dictionary.py +179 -0
- sqlspec/adapters/psqlpy/driver.py +892 -0
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +102 -0
- sqlspec/adapters/psycopg/__init__.py +32 -0
- sqlspec/adapters/psycopg/_type_handlers.py +90 -0
- sqlspec/adapters/psycopg/_types.py +18 -0
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +962 -0
- sqlspec/adapters/psycopg/config.py +487 -0
- sqlspec/adapters/psycopg/data_dictionary.py +630 -0
- sqlspec/adapters/psycopg/driver.py +1336 -0
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/spanner/__init__.py +38 -0
- sqlspec/adapters/spanner/_type_handlers.py +186 -0
- sqlspec/adapters/spanner/_types.py +12 -0
- sqlspec/adapters/spanner/adk/__init__.py +5 -0
- sqlspec/adapters/spanner/adk/store.py +435 -0
- sqlspec/adapters/spanner/config.py +241 -0
- sqlspec/adapters/spanner/data_dictionary.py +95 -0
- sqlspec/adapters/spanner/dialect/__init__.py +6 -0
- sqlspec/adapters/spanner/dialect/_spangres.py +52 -0
- sqlspec/adapters/spanner/dialect/_spanner.py +123 -0
- sqlspec/adapters/spanner/driver.py +366 -0
- sqlspec/adapters/spanner/litestar/__init__.py +5 -0
- sqlspec/adapters/spanner/litestar/store.py +266 -0
- sqlspec/adapters/spanner/type_converter.py +46 -0
- sqlspec/adapters/sqlite/__init__.py +18 -0
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +11 -0
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +582 -0
- sqlspec/adapters/sqlite/config.py +221 -0
- sqlspec/adapters/sqlite/data_dictionary.py +256 -0
- sqlspec/adapters/sqlite/driver.py +527 -0
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +140 -0
- sqlspec/base.py +811 -0
- sqlspec/builder/__init__.py +146 -0
- sqlspec/builder/_base.py +900 -0
- sqlspec/builder/_column.py +517 -0
- sqlspec/builder/_ddl.py +1642 -0
- sqlspec/builder/_delete.py +84 -0
- sqlspec/builder/_dml.py +381 -0
- sqlspec/builder/_expression_wrappers.py +46 -0
- sqlspec/builder/_factory.py +1537 -0
- sqlspec/builder/_insert.py +315 -0
- sqlspec/builder/_join.py +375 -0
- sqlspec/builder/_merge.py +848 -0
- sqlspec/builder/_parsing_utils.py +297 -0
- sqlspec/builder/_select.py +1615 -0
- sqlspec/builder/_update.py +161 -0
- sqlspec/builder/_vector_expressions.py +259 -0
- sqlspec/cli.py +764 -0
- sqlspec/config.py +1540 -0
- sqlspec/core/__init__.py +305 -0
- sqlspec/core/cache.py +785 -0
- sqlspec/core/compiler.py +603 -0
- sqlspec/core/filters.py +872 -0
- sqlspec/core/hashing.py +274 -0
- sqlspec/core/metrics.py +83 -0
- sqlspec/core/parameters/__init__.py +64 -0
- sqlspec/core/parameters/_alignment.py +266 -0
- sqlspec/core/parameters/_converter.py +413 -0
- sqlspec/core/parameters/_processor.py +341 -0
- sqlspec/core/parameters/_registry.py +201 -0
- sqlspec/core/parameters/_transformers.py +226 -0
- sqlspec/core/parameters/_types.py +430 -0
- sqlspec/core/parameters/_validator.py +123 -0
- sqlspec/core/pipeline.py +187 -0
- sqlspec/core/result.py +1124 -0
- sqlspec/core/splitter.py +940 -0
- sqlspec/core/stack.py +163 -0
- sqlspec/core/statement.py +835 -0
- sqlspec/core/type_conversion.py +235 -0
- sqlspec/driver/__init__.py +36 -0
- sqlspec/driver/_async.py +1027 -0
- sqlspec/driver/_common.py +1236 -0
- sqlspec/driver/_sync.py +1025 -0
- sqlspec/driver/mixins/__init__.py +7 -0
- sqlspec/driver/mixins/_result_tools.py +61 -0
- sqlspec/driver/mixins/_sql_translator.py +122 -0
- sqlspec/driver/mixins/_storage.py +311 -0
- sqlspec/exceptions.py +321 -0
- sqlspec/extensions/__init__.py +0 -0
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/__init__.py +10 -0
- sqlspec/extensions/aiosql/adapter.py +471 -0
- sqlspec/extensions/fastapi/__init__.py +19 -0
- sqlspec/extensions/fastapi/extension.py +341 -0
- sqlspec/extensions/fastapi/providers.py +543 -0
- sqlspec/extensions/flask/__init__.py +36 -0
- sqlspec/extensions/flask/_state.py +72 -0
- sqlspec/extensions/flask/_utils.py +40 -0
- sqlspec/extensions/flask/extension.py +402 -0
- sqlspec/extensions/litestar/__init__.py +23 -0
- sqlspec/extensions/litestar/_utils.py +52 -0
- sqlspec/extensions/litestar/cli.py +92 -0
- sqlspec/extensions/litestar/config.py +90 -0
- sqlspec/extensions/litestar/handlers.py +316 -0
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +638 -0
- sqlspec/extensions/litestar/providers.py +454 -0
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/extensions/otel/__init__.py +58 -0
- sqlspec/extensions/prometheus/__init__.py +107 -0
- sqlspec/extensions/starlette/__init__.py +10 -0
- sqlspec/extensions/starlette/_state.py +26 -0
- sqlspec/extensions/starlette/_utils.py +52 -0
- sqlspec/extensions/starlette/extension.py +257 -0
- sqlspec/extensions/starlette/middleware.py +154 -0
- sqlspec/loader.py +716 -0
- sqlspec/migrations/__init__.py +36 -0
- sqlspec/migrations/base.py +728 -0
- sqlspec/migrations/commands.py +1140 -0
- sqlspec/migrations/context.py +142 -0
- sqlspec/migrations/fix.py +203 -0
- sqlspec/migrations/loaders.py +450 -0
- sqlspec/migrations/runner.py +1024 -0
- sqlspec/migrations/templates.py +234 -0
- sqlspec/migrations/tracker.py +403 -0
- sqlspec/migrations/utils.py +256 -0
- sqlspec/migrations/validation.py +203 -0
- sqlspec/observability/__init__.py +22 -0
- sqlspec/observability/_config.py +228 -0
- sqlspec/observability/_diagnostics.py +67 -0
- sqlspec/observability/_dispatcher.py +151 -0
- sqlspec/observability/_observer.py +180 -0
- sqlspec/observability/_runtime.py +381 -0
- sqlspec/observability/_spans.py +158 -0
- sqlspec/protocols.py +530 -0
- sqlspec/py.typed +0 -0
- sqlspec/storage/__init__.py +46 -0
- sqlspec/storage/_utils.py +104 -0
- sqlspec/storage/backends/__init__.py +1 -0
- sqlspec/storage/backends/base.py +163 -0
- sqlspec/storage/backends/fsspec.py +398 -0
- sqlspec/storage/backends/local.py +377 -0
- sqlspec/storage/backends/obstore.py +580 -0
- sqlspec/storage/errors.py +104 -0
- sqlspec/storage/pipeline.py +604 -0
- sqlspec/storage/registry.py +289 -0
- sqlspec/typing.py +219 -0
- sqlspec/utils/__init__.py +31 -0
- sqlspec/utils/arrow_helpers.py +95 -0
- sqlspec/utils/config_resolver.py +153 -0
- sqlspec/utils/correlation.py +132 -0
- sqlspec/utils/data_transformation.py +114 -0
- sqlspec/utils/dependencies.py +79 -0
- sqlspec/utils/deprecation.py +113 -0
- sqlspec/utils/fixtures.py +250 -0
- sqlspec/utils/logging.py +172 -0
- sqlspec/utils/module_loader.py +273 -0
- sqlspec/utils/portal.py +325 -0
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +396 -0
- sqlspec/utils/singleton.py +41 -0
- sqlspec/utils/sync_tools.py +277 -0
- sqlspec/utils/text.py +108 -0
- sqlspec/utils/type_converters.py +99 -0
- sqlspec/utils/type_guards.py +1324 -0
- sqlspec/utils/version.py +444 -0
- sqlspec-0.32.0.dist-info/METADATA +202 -0
- sqlspec-0.32.0.dist-info/RECORD +262 -0
- sqlspec-0.32.0.dist-info/WHEEL +4 -0
- sqlspec-0.32.0.dist-info/entry_points.txt +2 -0
- sqlspec-0.32.0.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
"""Spanner configuration."""
|
|
2
|
+
|
|
3
|
+
from collections.abc import Callable, Generator
|
|
4
|
+
from contextlib import contextmanager
|
|
5
|
+
from typing import TYPE_CHECKING, Any, ClassVar, TypedDict, cast
|
|
6
|
+
|
|
7
|
+
from google.cloud.spanner_v1 import Client
|
|
8
|
+
from google.cloud.spanner_v1.pool import AbstractSessionPool, FixedSizePool
|
|
9
|
+
from typing_extensions import NotRequired
|
|
10
|
+
|
|
11
|
+
from sqlspec.adapters.spanner._types import SpannerConnection
|
|
12
|
+
from sqlspec.adapters.spanner.driver import SpannerSyncDriver, spanner_statement_config
|
|
13
|
+
from sqlspec.config import SyncDatabaseConfig
|
|
14
|
+
from sqlspec.exceptions import ImproperConfigurationError
|
|
15
|
+
from sqlspec.utils.serializers import from_json, to_json
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from google.auth.credentials import Credentials
|
|
19
|
+
from google.cloud.spanner_v1.database import Database
|
|
20
|
+
|
|
21
|
+
from sqlspec.config import ExtensionConfigs
|
|
22
|
+
from sqlspec.core import StatementConfig
|
|
23
|
+
from sqlspec.observability import ObservabilityConfig
|
|
24
|
+
|
|
25
|
+
__all__ = ("SpannerConnectionParams", "SpannerDriverFeatures", "SpannerPoolParams", "SpannerSyncConfig")
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class SpannerConnectionParams(TypedDict):
    """Spanner connection parameters.

    All keys are optional; `SpannerSyncConfig` reads them out of its
    ``pool_config`` dict when building the client and database handles.
    """

    # GCP project id; forwarded to the Spanner Client constructor.
    project: "NotRequired[str]"
    # Spanner instance identifier (required at runtime by SpannerSyncConfig).
    instance_id: "NotRequired[str]"
    # Database identifier within the instance (required at runtime by SpannerSyncConfig).
    database_id: "NotRequired[str]"
    # Explicit google-auth credentials; forwarded to the Spanner Client.
    credentials: "NotRequired[Credentials]"
    # Extra options forwarded verbatim to the Spanner Client constructor.
    client_options: "NotRequired[dict[str, Any]]"
    # Catch-all for additional adapter-specific settings.
    extra: "NotRequired[dict[str, Any]]"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class SpannerPoolParams(SpannerConnectionParams):
    """Session pool configuration.

    Extends the connection parameters with session-pool tuning knobs
    consumed by ``SpannerSyncConfig._create_pool``.
    """

    # Session pool implementation; SpannerSyncConfig defaults this to FixedSizePool.
    pool_type: "NotRequired[type[AbstractSessionPool]]"
    # Lower bound of pooled sessions (SpannerSyncConfig defaults this to 1).
    min_sessions: "NotRequired[int]"
    # Upper bound of pooled sessions (SpannerSyncConfig defaults this to 10);
    # mapped to the pool's "size" argument when "size" is not given explicitly.
    max_sessions: "NotRequired[int]"
    # Labels attached to pooled sessions.
    labels: "NotRequired[dict[str, str]]"
    # Keep-alive ping interval — presumably seconds; TODO confirm against the pool implementation.
    ping_interval: "NotRequired[int]"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class SpannerDriverFeatures(TypedDict):
    """Driver feature flags for Spanner.

    Passed as ``driver_features`` to ``SpannerSyncConfig``; unset keys get
    defaults in ``SpannerSyncConfig.__init__``.
    """

    # Automatic UUID value conversion (defaulted to True by SpannerSyncConfig).
    enable_uuid_conversion: "NotRequired[bool]"
    # Serializer used for JSON values (defaulted to sqlspec's to_json).
    json_serializer: "NotRequired[Callable[[Any], str]]"
    # Deserializer used for JSON values (defaulted to sqlspec's from_json).
    json_deserializer: "NotRequired[Callable[[str], Any]]"
    # Labels for driver-created sessions — NOTE(review): relationship to pool
    # "labels" is not visible here; verify against the driver implementation.
    session_labels: "NotRequired[dict[str, str]]"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class SpannerSyncConfig(SyncDatabaseConfig["SpannerConnection", "AbstractSessionPool", SpannerSyncDriver]):
    """Spanner configuration and session management.

    Lazily creates a Spanner ``Client``/``Database`` pair bound to a session
    pool, and provides context managers that yield read-only snapshots or
    read-write transactions wrapped in :class:`SpannerSyncDriver`.
    """

    driver_type: ClassVar[type["SpannerSyncDriver"]] = SpannerSyncDriver
    connection_type: ClassVar[type["SpannerConnection"]] = cast("type[SpannerConnection]", SpannerConnection)
    supports_transactional_ddl: ClassVar[bool] = False
    supports_native_arrow_export: ClassVar[bool] = True
    supports_native_arrow_import: ClassVar[bool] = True
    supports_native_parquet_export: ClassVar[bool] = False
    supports_native_parquet_import: ClassVar[bool] = False
    requires_staging_for_load: ClassVar[bool] = False

    def __init__(
        self,
        *,
        pool_config: "SpannerPoolParams | dict[str, Any] | None" = None,
        pool_instance: "AbstractSessionPool | None" = None,
        migration_config: "dict[str, Any] | None" = None,
        statement_config: "StatementConfig | None" = None,
        driver_features: "SpannerDriverFeatures | dict[str, Any] | None" = None,
        bind_key: "str | None" = None,
        extension_config: "ExtensionConfigs | None" = None,
        observability_config: "ObservabilityConfig | None" = None,
    ) -> None:
        """Initialize the Spanner configuration.

        Args:
            pool_config: Connection and session-pool settings (see SpannerPoolParams).
            pool_instance: Pre-built session pool to use instead of creating one.
            migration_config: Migration settings forwarded to the base config.
            statement_config: Statement configuration; defaults to ``spanner_statement_config``.
            driver_features: Driver feature flags (see SpannerDriverFeatures).
            bind_key: Optional key identifying this config.
            extension_config: Extension settings forwarded to the base config.
            observability_config: Observability settings forwarded to the base config.
        """
        self.pool_config = dict(pool_config) if pool_config else {}

        # Pool defaults applied before the base class sees the dict.
        self.pool_config.setdefault("min_sessions", 1)
        self.pool_config.setdefault("max_sessions", 10)
        self.pool_config.setdefault("pool_type", FixedSizePool)

        features: dict[str, Any] = dict(driver_features) if driver_features else {}
        features.setdefault("enable_uuid_conversion", True)
        features.setdefault("json_serializer", to_json)
        features.setdefault("json_deserializer", from_json)

        base_statement_config = statement_config or spanner_statement_config

        super().__init__(
            pool_config=self.pool_config,
            pool_instance=pool_instance,
            migration_config=migration_config,
            statement_config=base_statement_config,
            driver_features=features,
            bind_key=bind_key,
            extension_config=extension_config,
            observability_config=observability_config,
        )

        # Client and Database handles are created lazily on first use.
        self._client: Client | None = None
        self._database: Database | None = None

    def _require_instance_database(self) -> "tuple[str, str]":
        """Return ``(instance_id, database_id)`` from pool_config.

        Raises:
            ImproperConfigurationError: If either identifier is missing or empty.
        """
        instance_id = self.pool_config.get("instance_id")
        database_id = self.pool_config.get("database_id")
        if not instance_id or not database_id:
            msg = "instance_id and database_id are required."
            raise ImproperConfigurationError(msg)
        return instance_id, database_id

    def _get_client(self) -> Client:
        """Return the lazily created Spanner client."""
        if self._client is None:
            self._client = Client(
                project=self.pool_config.get("project"),
                credentials=self.pool_config.get("credentials"),
                client_options=self.pool_config.get("client_options"),
            )
        return self._client

    def get_database(self) -> "Database":
        """Return the cached Database bound to the configured session pool.

        Creates the session pool and the Database handle on first call.

        Raises:
            ImproperConfigurationError: If instance_id or database_id is missing.
        """
        instance_id, database_id = self._require_instance_database()

        if self.pool_instance is None:
            self.pool_instance = self.provide_pool()

        if self._database is None:
            client = self._get_client()
            self._database = client.instance(instance_id).database(database_id, pool=self.pool_instance)  # type: ignore[no-untyped-call]
        return self._database

    def create_connection(self) -> SpannerConnection:
        """Create a read-only snapshot connection.

        Raises:
            ImproperConfigurationError: If instance_id or database_id is missing.
        """
        # Reuse the cached Database from get_database() instead of rebuilding
        # an identical handle (previous code duplicated the construction).
        database = self.get_database()
        return cast("SpannerConnection", database.snapshot())

    def _create_pool(self) -> AbstractSessionPool:
        """Build the session pool described by pool_config.

        Raises:
            ImproperConfigurationError: If instance_id or database_id is missing.
        """
        self._require_instance_database()

        pool_type = cast("type[AbstractSessionPool]", self.pool_config.get("pool_type", FixedSizePool))

        pool_kwargs: dict[str, Any] = {}
        if pool_type is FixedSizePool:
            # FixedSizePool takes a single "size"; fall back to max_sessions.
            if "size" in self.pool_config:
                pool_kwargs["size"] = self.pool_config["size"]
            elif "max_sessions" in self.pool_config:
                pool_kwargs["size"] = self.pool_config["max_sessions"]
            if "labels" in self.pool_config:
                pool_kwargs["labels"] = self.pool_config["labels"]
        else:
            # Other pool types accept a broader set of keyword arguments.
            valid_pool_keys = {"size", "labels", "ping_interval"}
            pool_kwargs = {k: v for k, v in self.pool_config.items() if k in valid_pool_keys and v is not None}
            if "size" not in pool_kwargs and "max_sessions" in self.pool_config:
                pool_kwargs["size"] = self.pool_config["max_sessions"]

        pool_factory = cast("Callable[..., AbstractSessionPool]", pool_type)
        return pool_factory(**pool_kwargs)

    def _close_pool(self) -> None:
        """Close the pool if it exposes a ``close()`` method (best-effort)."""
        if self.pool_instance and hasattr(self.pool_instance, "close"):
            cast("Any", self.pool_instance).close()

    @contextmanager
    def provide_connection(
        self, *args: Any, transaction: "bool" = False, **kwargs: Any
    ) -> Generator[SpannerConnection, None, None]:
        """Yield a Snapshot (default) or Transaction context from the configured pool.

        Args:
            *args: Additional positional arguments (unused, for interface compatibility).
            transaction: If True, yields a Transaction context that supports
                execute_update() for DML statements. If False (default), yields
                a read-only Snapshot context for SELECT queries.
            **kwargs: Additional keyword arguments (unused, for interface compatibility).

        Note: For complex transactional logic with retries, use database.run_in_transaction()
            directly. The Transaction context here auto-commits on successful exit.
        """
        database = self.get_database()
        if transaction:
            session = cast("Any", database).session()
            session.create()
            try:
                txn = session.transaction()
                txn.__enter__()
                try:
                    yield cast("SpannerConnection", txn)
                    # Commit only if the transaction actually began; checks a
                    # private attribute — TODO confirm across client versions.
                    if hasattr(txn, "_transaction_id") and txn._transaction_id is not None:
                        txn.commit()
                except Exception:
                    if hasattr(txn, "_transaction_id") and txn._transaction_id is not None:
                        txn.rollback()
                    raise
            finally:
                # Always release the session, even when commit/rollback raised.
                session.delete()
        else:
            with cast("Any", database).snapshot(multi_use=True) as snapshot:
                yield cast("SpannerConnection", snapshot)

    @contextmanager
    def provide_session(
        self, *args: Any, statement_config: "StatementConfig | None" = None, transaction: "bool" = False, **kwargs: Any
    ) -> Generator[SpannerSyncDriver, None, None]:
        """Yield a SpannerSyncDriver bound to a snapshot or transaction connection.

        Args:
            *args: Forwarded to provide_connection.
            statement_config: Statement configuration override for this session.
            transaction: Passed through to provide_connection.
            **kwargs: Forwarded to provide_connection.
        """
        with self.provide_connection(*args, transaction=transaction, **kwargs) as connection:
            driver = self.driver_type(
                connection=connection,
                statement_config=statement_config or self.statement_config,
                driver_features=self.driver_features,
            )
            yield self._prepare_driver(driver)

    @contextmanager
    def provide_write_session(
        self, *args: Any, statement_config: "StatementConfig | None" = None, **kwargs: Any
    ) -> Generator[SpannerSyncDriver, None, None]:
        """Yield a driver wrapped in a read-write transaction (transaction=True)."""
        with self.provide_session(*args, statement_config=statement_config, transaction=True, **kwargs) as driver:
            yield driver

    def get_signature_namespace(self) -> dict[str, Any]:
        """Extend the base signature namespace with Spanner-specific types."""
        namespace = super().get_signature_namespace()
        namespace.update({
            "SpannerSyncConfig": SpannerSyncConfig,
            "SpannerConnectionParams": SpannerConnectionParams,
            "SpannerDriverFeatures": SpannerDriverFeatures,
            "SpannerSyncDriver": SpannerSyncDriver,
        })
        return namespace
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"""Spanner metadata queries using INFORMATION_SCHEMA."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, Any, cast
|
|
4
|
+
|
|
5
|
+
from sqlspec.driver import SyncDataDictionaryBase, SyncDriverAdapterBase
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from sqlspec.driver import VersionInfo
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
__all__ = ("SpannerDataDictionary",)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class SpannerDataDictionary(SyncDataDictionaryBase):
    """Fetch table, column, and index metadata from Spanner."""

    def get_version(self, driver: "SyncDriverAdapterBase") -> "VersionInfo | None":
        """Always return None; no version lookup is implemented for Spanner."""
        _ = driver
        return None

    def get_feature_flag(self, driver: "SyncDriverAdapterBase", feature: str) -> bool:
        """Report whether a named capability is available; unknown names are False."""
        _ = driver
        known_features = {
            "supports_json": True,
            "supports_generators": False,
            "supports_index_clustering": True,
            "supports_interleaved_tables": True,
        }
        return known_features.get(feature, False)

    def get_optimal_type(self, driver: "SyncDriverAdapterBase", type_category: str) -> str:
        """Map a generic type category to the preferred Spanner column type."""
        _ = driver
        category_to_type = {
            "json": "JSON",
            "uuid": "BYTES(16)",
            "boolean": "BOOL",
            "timestamp": "TIMESTAMP",
            "text": "STRING(MAX)",
            "blob": "BYTES(MAX)",
            "numeric": "NUMERIC",
            "bignumeric": "NUMERIC",
            "array": "ARRAY",
        }
        # STRING(MAX) is the fallback for unrecognized categories.
        return category_to_type.get(type_category, "STRING(MAX)")

    def get_tables(self, driver: "SyncDriverAdapterBase", schema: "str | None" = None) -> "list[str]":
        """List table names in the given schema (default schema when None)."""
        if schema is None:
            sql = "SELECT TABLE_NAME AS \"table_name\" FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = ''"
            query_params: dict[str, Any] = {}
        else:
            sql = 'SELECT TABLE_NAME AS "table_name" FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = @schema'
            query_params = {"schema": schema}

        rows = driver.select(sql, query_params)
        return [cast("str", row["table_name"]) for row in rows]

    def get_columns(
        self, driver: "SyncDriverAdapterBase", table: str, schema: "str | None" = None
    ) -> "list[dict[str, Any]]":
        """Describe the columns of a table as name/type/nullable dicts."""
        sql = """
        SELECT COLUMN_NAME AS "column_name", SPANNER_TYPE AS "spanner_type", IS_NULLABLE AS "is_nullable"
        FROM INFORMATION_SCHEMA.COLUMNS
        WHERE TABLE_NAME = @table
        """
        query_params: dict[str, Any] = {"table": table}
        if schema is None:
            sql += " AND TABLE_SCHEMA = ''"
        else:
            sql += " AND TABLE_SCHEMA = @schema"
            query_params["schema"] = schema

        rows = driver.select(sql, query_params)
        return [
            {"name": row["column_name"], "type": row["spanner_type"], "nullable": row["is_nullable"] == "YES"}
            for row in rows
        ]

    def get_indexes(
        self, driver: "SyncDriverAdapterBase", table: str, schema: "str | None" = None
    ) -> "list[dict[str, Any]]":
        """List indexes on a table as name/type/unique dicts."""
        sql = """
        SELECT INDEX_NAME AS "index_name", INDEX_TYPE AS "index_type", IS_UNIQUE AS "is_unique"
        FROM INFORMATION_SCHEMA.INDEXES
        WHERE TABLE_NAME = @table
        """
        query_params: dict[str, Any] = {"table": table}
        if schema is None:
            sql += " AND TABLE_SCHEMA = ''"
        else:
            sql += " AND TABLE_SCHEMA = @schema"
            query_params["schema"] = schema

        rows = driver.select(sql, query_params)
        return [{"name": row["index_name"], "type": row["index_type"], "unique": row["is_unique"]} for row in rows]
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
r"""Google Cloud Spanner PostgreSQL-interface dialect ("Spangres")."""
|
|
2
|
+
|
|
3
|
+
from typing import Any, cast
|
|
4
|
+
|
|
5
|
+
from sqlglot import exp
|
|
6
|
+
from sqlglot.dialects.postgres import Postgres
|
|
7
|
+
from sqlglot.tokens import TokenType
|
|
8
|
+
|
|
9
|
+
__all__ = ("Spangres",)
|
|
10
|
+
|
|
11
|
+
# Name stored on exp.Property nodes that represent a parsed ROW DELETION POLICY.
_ROW_DELETION_NAME = "ROW_DELETION_POLICY"
# Minimum tuple size — (column, interval) — needed to regenerate a policy clause.
_TTL_MIN_COMPONENTS = 2
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class Spangres(Postgres):
    """Spanner PostgreSQL-compatible dialect."""

    class Parser(Postgres.Parser):
        """Parse Spanner row deletion policies."""

        def _parse_property(self) -> exp.Expression:
            # Recognize: ROW DELETION POLICY (OLDER_THAN(<column>, INTERVAL <expr>))
            if self._match_text_seq("ROW", "DELETION", "POLICY"):  # type: ignore[no-untyped-call]
                # Advance past punctuation when present; _match return values
                # are intentionally ignored (lenient parsing).
                self._match(TokenType.L_PAREN)  # type: ignore[no-untyped-call]
                self._match_text_seq("OLDER_THAN")  # type: ignore[no-untyped-call]
                self._match(TokenType.L_PAREN)  # type: ignore[no-untyped-call]
                column = cast("exp.Expression", self._parse_id_var())
                self._match(TokenType.COMMA)  # type: ignore[no-untyped-call]
                self._match_text_seq("INTERVAL")  # type: ignore[no-untyped-call]
                interval = cast("exp.Expression", self._parse_expression())
                self._match(TokenType.R_PAREN)  # type: ignore[no-untyped-call]
                self._match(TokenType.R_PAREN)  # type: ignore[no-untyped-call]

                # Stash the policy as a generic Property whose value is a
                # (column, interval) tuple so Generator.property_sql can re-emit it.
                return exp.Property(
                    this=exp.Literal.string(_ROW_DELETION_NAME), value=exp.Tuple(expressions=[column, interval])
                )

            return cast("exp.Expression", super()._parse_property())

    class Generator(Postgres.Generator):
        """Generate Spanner row deletion policies."""

        def property_sql(self, expression: exp.Property) -> str:
            # Re-emit properties that the Parser above tagged as row deletion policies.
            if getattr(expression.this, "name", "").upper() == _ROW_DELETION_NAME:
                values = cast("Any", expression.args.get("value"))
                if values and getattr(values, "expressions", None) and len(values.expressions) >= _TTL_MIN_COMPONENTS:
                    column = self.sql(values.expressions[0])
                    interval_sql = self.sql(values.expressions[1])
                    # The parser consumed the INTERVAL keyword; restore it if the
                    # rendered expression does not already start with it.
                    if not interval_sql.upper().startswith("INTERVAL"):
                        interval_sql = f"INTERVAL {interval_sql}"
                    return f"ROW DELETION POLICY (OLDER_THAN({column}, {interval_sql}))"

            # Malformed/unrelated properties fall through to Postgres behavior.
            return super().property_sql(expression)
|
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
"""Google Cloud Spanner SQL dialect (GoogleSQL variant).
|
|
2
|
+
|
|
3
|
+
Extends the BigQuery dialect with Spanner-only DDL features:
|
|
4
|
+
`INTERLEAVE IN PARENT` for interleaved tables and `ROW DELETION POLICY`
|
|
5
|
+
for row-level time-to-live policies (GoogleSQL).
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from typing import Any, cast
|
|
9
|
+
|
|
10
|
+
from sqlglot import exp
|
|
11
|
+
from sqlglot.dialects.bigquery import BigQuery
|
|
12
|
+
from sqlglot.tokens import TokenType
|
|
13
|
+
|
|
14
|
+
__all__ = ("Spanner",)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
# Spanner-only keywords, registered only when the installed sqlglot version
# defines the corresponding TokenType members (feature-detected via getattr
# so older sqlglot releases keep working).
_SPANNER_KEYWORDS: dict[str, TokenType] = {}
interleave_token = getattr(TokenType, "INTERLEAVE", None)
if interleave_token is not None:
    _SPANNER_KEYWORDS["INTERLEAVE"] = interleave_token
ttl_token = getattr(TokenType, "TTL", None)
if ttl_token is not None:
    _SPANNER_KEYWORDS["TTL"] = ttl_token

# Minimum tuple size — (column, interval) — needed to regenerate a TTL policy.
_TTL_MIN_COMPONENTS = 2
# Name stored on exp.Property nodes that represent a parsed ROW DELETION POLICY.
_ROW_DELETION_NAME = "ROW_DELETION_POLICY"
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class Spanner(BigQuery):
|
|
30
|
+
"""Google Cloud Spanner SQL dialect."""
|
|
31
|
+
|
|
32
|
+
class Tokenizer(BigQuery.Tokenizer):
    """Tokenizer adds Spanner-only keywords when supported by sqlglot."""

    # Merge the feature-detected Spanner keywords over BigQuery's keyword table;
    # later entries win, so Spanner keywords take precedence on collision.
    KEYWORDS = {**BigQuery.Tokenizer.KEYWORDS, **_SPANNER_KEYWORDS}
|
|
36
|
+
|
|
37
|
+
class Parser(BigQuery.Parser):
|
|
38
|
+
"""Parse Spanner extensions such as INTERLEAVE and row deletion policies."""
|
|
39
|
+
|
|
40
|
+
def _parse_table_parts(
|
|
41
|
+
self, schema: "bool" = False, is_db_reference: "bool" = False, wildcard: "bool" = False
|
|
42
|
+
) -> exp.Table:
|
|
43
|
+
"""Parse Spanner table options including interleaving metadata."""
|
|
44
|
+
table = super()._parse_table_parts(schema=schema, is_db_reference=is_db_reference, wildcard=wildcard)
|
|
45
|
+
|
|
46
|
+
if self._match_text_seq("INTERLEAVE", "IN", "PARENT"): # type: ignore[no-untyped-call]
|
|
47
|
+
parent = cast("exp.Expression", self._parse_table(schema=True, is_db_reference=True))
|
|
48
|
+
on_delete: str | None = None
|
|
49
|
+
|
|
50
|
+
if self._match_text_seq("ON", "DELETE"): # type: ignore[no-untyped-call]
|
|
51
|
+
if self._match_text_seq("CASCADE"): # type: ignore[no-untyped-call]
|
|
52
|
+
on_delete = "CASCADE"
|
|
53
|
+
elif self._match_text_seq("NO", "ACTION"): # type: ignore[no-untyped-call]
|
|
54
|
+
on_delete = "NO ACTION"
|
|
55
|
+
|
|
56
|
+
table.set("interleave_parent", parent)
|
|
57
|
+
if on_delete:
|
|
58
|
+
table.set("interleave_on_delete", on_delete)
|
|
59
|
+
|
|
60
|
+
return table
|
|
61
|
+
|
|
62
|
+
def _parse_property(self) -> exp.Expression:
|
|
63
|
+
"""Parse Spanner row deletion policy or PostgreSQL-style TTL."""
|
|
64
|
+
if self._match_text_seq("ROW", "DELETION", "POLICY"): # type: ignore[no-untyped-call]
|
|
65
|
+
self._match(TokenType.L_PAREN) # type: ignore[no-untyped-call]
|
|
66
|
+
self._match_text_seq("OLDER_THAN") # type: ignore[no-untyped-call]
|
|
67
|
+
self._match(TokenType.L_PAREN) # type: ignore[no-untyped-call]
|
|
68
|
+
column = cast("exp.Expression", self._parse_id_var())
|
|
69
|
+
self._match(TokenType.COMMA) # type: ignore[no-untyped-call]
|
|
70
|
+
self._match_text_seq("INTERVAL") # type: ignore[no-untyped-call]
|
|
71
|
+
interval = cast("exp.Expression", self._parse_expression())
|
|
72
|
+
self._match(TokenType.R_PAREN) # type: ignore[no-untyped-call]
|
|
73
|
+
self._match(TokenType.R_PAREN) # type: ignore[no-untyped-call]
|
|
74
|
+
|
|
75
|
+
return exp.Property(
|
|
76
|
+
this=exp.Literal.string(_ROW_DELETION_NAME), value=exp.Tuple(expressions=[column, interval])
|
|
77
|
+
)
|
|
78
|
+
|
|
79
|
+
if self._match_text_seq("TTL"): # type: ignore[no-untyped-call] # PostgreSQL-dialect style, keep for compatibility
|
|
80
|
+
self._match_text_seq("INTERVAL") # type: ignore[no-untyped-call]
|
|
81
|
+
interval = cast("exp.Expression", self._parse_expression())
|
|
82
|
+
self._match_text_seq("ON") # type: ignore[no-untyped-call]
|
|
83
|
+
column = cast("exp.Expression", self._parse_id_var())
|
|
84
|
+
|
|
85
|
+
return exp.Property(this=exp.Literal.string("TTL"), value=exp.Tuple(expressions=[interval, column]))
|
|
86
|
+
|
|
87
|
+
return cast("exp.Expression", super()._parse_property())
|
|
88
|
+
|
|
89
|
+
class Generator(BigQuery.Generator):
|
|
90
|
+
"""Generate Spanner-specific DDL syntax."""
|
|
91
|
+
|
|
92
|
+
def table_sql(self, expression: exp.Table, sep: str = " ") -> str:
|
|
93
|
+
"""Render INTERLEAVE clause when present on a table expression."""
|
|
94
|
+
sql = super().table_sql(expression, sep=sep)
|
|
95
|
+
|
|
96
|
+
parent = expression.args.get("interleave_parent")
|
|
97
|
+
if parent:
|
|
98
|
+
sql = f"{sql}\nINTERLEAVE IN PARENT {self.sql(parent)}"
|
|
99
|
+
on_delete = expression.args.get("interleave_on_delete")
|
|
100
|
+
if on_delete:
|
|
101
|
+
sql = f"{sql} ON DELETE {on_delete}"
|
|
102
|
+
|
|
103
|
+
return sql
|
|
104
|
+
|
|
105
|
+
def property_sql(self, expression: exp.Property) -> str:
|
|
106
|
+
"""Render row deletion policy or TTL."""
|
|
107
|
+
if getattr(expression.this, "name", "").upper() == _ROW_DELETION_NAME:
|
|
108
|
+
values = cast("Any", expression.args.get("value"))
|
|
109
|
+
if values and getattr(values, "expressions", None) and len(values.expressions) >= _TTL_MIN_COMPONENTS:
|
|
110
|
+
column = self.sql(values.expressions[0])
|
|
111
|
+
interval_sql = self.sql(values.expressions[1])
|
|
112
|
+
if not interval_sql.upper().startswith("INTERVAL"):
|
|
113
|
+
interval_sql = f"INTERVAL {interval_sql}"
|
|
114
|
+
return f"ROW DELETION POLICY (OLDER_THAN({column}, {interval_sql}))"
|
|
115
|
+
|
|
116
|
+
if getattr(expression.this, "name", "").upper() == "TTL":
|
|
117
|
+
values = cast("Any", expression.args.get("value"))
|
|
118
|
+
if values and getattr(values, "expressions", None) and len(values.expressions) >= _TTL_MIN_COMPONENTS:
|
|
119
|
+
interval = self.sql(values.expressions[0])
|
|
120
|
+
column = self.sql(values.expressions[1])
|
|
121
|
+
return f"TTL INTERVAL {interval} ON {column}"
|
|
122
|
+
|
|
123
|
+
return super().property_sql(expression)
|