sqlspec 0.26.0__py3-none-any.whl → 0.28.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/__init__.py +7 -15
- sqlspec/_serialization.py +55 -25
- sqlspec/_typing.py +155 -52
- sqlspec/adapters/adbc/_types.py +1 -1
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +880 -0
- sqlspec/adapters/adbc/config.py +62 -12
- sqlspec/adapters/adbc/data_dictionary.py +74 -2
- sqlspec/adapters/adbc/driver.py +226 -58
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +44 -50
- sqlspec/adapters/aiosqlite/_types.py +1 -1
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +536 -0
- sqlspec/adapters/aiosqlite/config.py +86 -16
- sqlspec/adapters/aiosqlite/data_dictionary.py +34 -2
- sqlspec/adapters/aiosqlite/driver.py +127 -38
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +7 -7
- sqlspec/adapters/asyncmy/__init__.py +7 -1
- sqlspec/adapters/asyncmy/_types.py +1 -1
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +503 -0
- sqlspec/adapters/asyncmy/config.py +59 -17
- sqlspec/adapters/asyncmy/data_dictionary.py +41 -2
- sqlspec/adapters/asyncmy/driver.py +293 -62
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +2 -1
- sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
- sqlspec/adapters/asyncpg/_types.py +11 -7
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +460 -0
- sqlspec/adapters/asyncpg/config.py +57 -36
- sqlspec/adapters/asyncpg/data_dictionary.py +48 -2
- sqlspec/adapters/asyncpg/driver.py +153 -23
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/_types.py +1 -1
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +585 -0
- sqlspec/adapters/bigquery/config.py +36 -11
- sqlspec/adapters/bigquery/data_dictionary.py +42 -2
- sqlspec/adapters/bigquery/driver.py +489 -144
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +55 -23
- sqlspec/adapters/duckdb/_types.py +2 -2
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +563 -0
- sqlspec/adapters/duckdb/config.py +79 -21
- sqlspec/adapters/duckdb/data_dictionary.py +41 -2
- sqlspec/adapters/duckdb/driver.py +225 -44
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +5 -5
- sqlspec/adapters/duckdb/type_converter.py +51 -21
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +20 -2
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1628 -0
- sqlspec/adapters/oracledb/config.py +120 -36
- sqlspec/adapters/oracledb/data_dictionary.py +87 -20
- sqlspec/adapters/oracledb/driver.py +475 -86
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +765 -0
- sqlspec/adapters/oracledb/migrations.py +316 -25
- sqlspec/adapters/oracledb/type_converter.py +91 -16
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +2 -1
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +483 -0
- sqlspec/adapters/psqlpy/config.py +45 -19
- sqlspec/adapters/psqlpy/data_dictionary.py +48 -2
- sqlspec/adapters/psqlpy/driver.py +108 -41
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +40 -11
- sqlspec/adapters/psycopg/_type_handlers.py +80 -0
- sqlspec/adapters/psycopg/_types.py +2 -1
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +962 -0
- sqlspec/adapters/psycopg/config.py +65 -37
- sqlspec/adapters/psycopg/data_dictionary.py +91 -3
- sqlspec/adapters/psycopg/driver.py +200 -78
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/sqlite/__init__.py +2 -1
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +1 -1
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +582 -0
- sqlspec/adapters/sqlite/config.py +85 -16
- sqlspec/adapters/sqlite/data_dictionary.py +34 -2
- sqlspec/adapters/sqlite/driver.py +120 -52
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +5 -5
- sqlspec/base.py +45 -26
- sqlspec/builder/__init__.py +73 -4
- sqlspec/builder/_base.py +91 -58
- sqlspec/builder/_column.py +5 -5
- sqlspec/builder/_ddl.py +98 -89
- sqlspec/builder/_delete.py +5 -4
- sqlspec/builder/_dml.py +388 -0
- sqlspec/{_sql.py → builder/_factory.py} +41 -44
- sqlspec/builder/_insert.py +5 -82
- sqlspec/builder/{mixins/_join_operations.py → _join.py} +145 -143
- sqlspec/builder/_merge.py +446 -11
- sqlspec/builder/_parsing_utils.py +9 -11
- sqlspec/builder/_select.py +1313 -25
- sqlspec/builder/_update.py +11 -42
- sqlspec/cli.py +76 -69
- sqlspec/config.py +331 -62
- sqlspec/core/__init__.py +5 -4
- sqlspec/core/cache.py +18 -18
- sqlspec/core/compiler.py +6 -8
- sqlspec/core/filters.py +55 -47
- sqlspec/core/hashing.py +9 -9
- sqlspec/core/parameters.py +76 -45
- sqlspec/core/result.py +234 -47
- sqlspec/core/splitter.py +16 -17
- sqlspec/core/statement.py +32 -31
- sqlspec/core/type_conversion.py +3 -2
- sqlspec/driver/__init__.py +1 -3
- sqlspec/driver/_async.py +183 -160
- sqlspec/driver/_common.py +197 -109
- sqlspec/driver/_sync.py +189 -161
- sqlspec/driver/mixins/_result_tools.py +20 -236
- sqlspec/driver/mixins/_sql_translator.py +4 -4
- sqlspec/exceptions.py +70 -7
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/adapter.py +69 -61
- sqlspec/extensions/fastapi/__init__.py +21 -0
- sqlspec/extensions/fastapi/extension.py +331 -0
- sqlspec/extensions/fastapi/providers.py +543 -0
- sqlspec/extensions/flask/__init__.py +36 -0
- sqlspec/extensions/flask/_state.py +71 -0
- sqlspec/extensions/flask/_utils.py +40 -0
- sqlspec/extensions/flask/extension.py +389 -0
- sqlspec/extensions/litestar/__init__.py +21 -4
- sqlspec/extensions/litestar/cli.py +54 -10
- sqlspec/extensions/litestar/config.py +56 -266
- sqlspec/extensions/litestar/handlers.py +46 -17
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +349 -224
- sqlspec/extensions/litestar/providers.py +25 -25
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/extensions/starlette/__init__.py +10 -0
- sqlspec/extensions/starlette/_state.py +25 -0
- sqlspec/extensions/starlette/_utils.py +52 -0
- sqlspec/extensions/starlette/extension.py +254 -0
- sqlspec/extensions/starlette/middleware.py +154 -0
- sqlspec/loader.py +30 -49
- sqlspec/migrations/base.py +200 -76
- sqlspec/migrations/commands.py +591 -62
- sqlspec/migrations/context.py +6 -9
- sqlspec/migrations/fix.py +199 -0
- sqlspec/migrations/loaders.py +47 -19
- sqlspec/migrations/runner.py +241 -75
- sqlspec/migrations/tracker.py +237 -21
- sqlspec/migrations/utils.py +51 -3
- sqlspec/migrations/validation.py +177 -0
- sqlspec/protocols.py +106 -36
- sqlspec/storage/_utils.py +85 -0
- sqlspec/storage/backends/fsspec.py +133 -107
- sqlspec/storage/backends/local.py +78 -51
- sqlspec/storage/backends/obstore.py +276 -168
- sqlspec/storage/registry.py +75 -39
- sqlspec/typing.py +30 -84
- sqlspec/utils/__init__.py +25 -4
- sqlspec/utils/arrow_helpers.py +81 -0
- sqlspec/utils/config_resolver.py +6 -6
- sqlspec/utils/correlation.py +4 -5
- sqlspec/utils/data_transformation.py +3 -2
- sqlspec/utils/deprecation.py +9 -8
- sqlspec/utils/fixtures.py +4 -4
- sqlspec/utils/logging.py +46 -6
- sqlspec/utils/module_loader.py +205 -5
- sqlspec/utils/portal.py +311 -0
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +113 -4
- sqlspec/utils/sync_tools.py +36 -22
- sqlspec/utils/text.py +1 -2
- sqlspec/utils/type_guards.py +136 -20
- sqlspec/utils/version.py +433 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/METADATA +41 -22
- sqlspec-0.28.0.dist-info/RECORD +221 -0
- sqlspec/builder/mixins/__init__.py +0 -55
- sqlspec/builder/mixins/_cte_and_set_ops.py +0 -253
- sqlspec/builder/mixins/_delete_operations.py +0 -50
- sqlspec/builder/mixins/_insert_operations.py +0 -282
- sqlspec/builder/mixins/_merge_operations.py +0 -698
- sqlspec/builder/mixins/_order_limit_operations.py +0 -145
- sqlspec/builder/mixins/_pivot_operations.py +0 -157
- sqlspec/builder/mixins/_select_operations.py +0 -930
- sqlspec/builder/mixins/_update_operations.py +0 -199
- sqlspec/builder/mixins/_where_clause.py +0 -1298
- sqlspec-0.26.0.dist-info/RECORD +0 -157
- sqlspec-0.26.0.dist-info/licenses/NOTICE +0 -29
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.26.0.dist-info → sqlspec-0.28.0.dist-info}/licenses/LICENSE +0 -0
sqlspec/storage/registry.py
CHANGED
@@ -8,33 +8,19 @@ scheme-based routing, and named aliases for common configurations.
 import logging
 import re
 from pathlib import Path
-from typing import Any, Final,
+from typing import Any, Final, cast

 from mypy_extensions import mypyc_attr

 from sqlspec.exceptions import ImproperConfigurationError, MissingDependencyError
 from sqlspec.protocols import ObjectStoreProtocol
 from sqlspec.typing import FSSPEC_INSTALLED, OBSTORE_INSTALLED
+from sqlspec.utils.type_guards import is_local_path

 __all__ = ("StorageRegistry", "storage_registry")

 logger = logging.getLogger(__name__)

-
-def _is_local_uri(uri: str) -> bool:
-    """Check if URI represents a local filesystem path."""
-    if "://" in uri and not uri.startswith("file://"):
-        return False
-    windows_drive_min_length = 3
-    return (
-        Path(uri).exists()
-        or Path(uri).is_absolute()
-        or uri.startswith(("~", ".", "/"))
-        or (len(uri) >= windows_drive_min_length and uri[1:3] == ":\\")
-        or "/" in uri
-    )
-
-
 SCHEME_REGEX: Final = re.compile(r"([a-zA-Z0-9+.-]+)://")


@@ -74,7 +60,7 @@ class StorageRegistry:
     def __init__(self) -> None:
         self._alias_configs: dict[str, tuple[type[ObjectStoreProtocol], str, dict[str, Any]]] = {}
         self._aliases: dict[str, dict[str, Any]] = {}
-        self._instances: dict[
+        self._instances: dict[str | tuple[str, tuple[tuple[str, Any], ...]], ObjectStoreProtocol] = {}
         self._cache: dict[str, tuple[str, type[ObjectStoreProtocol]]] = {}

     def _make_hashable(self, obj: Any) -> Any:
@@ -88,7 +74,7 @@ class StorageRegistry:
         return obj

     def register_alias(
-        self, alias: str, uri: str, *, backend:
+        self, alias: str, uri: str, *, backend: str | None = None, base_path: str = "", **kwargs: Any
     ) -> None:
         """Register a named alias for a storage configuration.

@@ -110,9 +96,7 @@ class StorageRegistry:
         test_config["uri"] = uri
         self._aliases[alias] = test_config

-    def get(
-        self, uri_or_alias: Union[str, Path], *, backend: Optional[str] = None, **kwargs: Any
-    ) -> ObjectStoreProtocol:
+    def get(self, uri_or_alias: str | Path, *, backend: str | None = None, **kwargs: Any) -> ObjectStoreProtocol:
         """Get backend instance using URI-first routing with automatic backend selection.

         Args:
@@ -133,11 +117,15 @@
         if isinstance(uri_or_alias, Path):
             uri_or_alias = f"file://{uri_or_alias.resolve()}"

-
+        # Include backend in cache key to ensure different backends for same URI are cached separately
+        cache_params = dict(kwargs)
+        if backend:
+            cache_params["__backend__"] = backend
+        cache_key = (uri_or_alias, self._make_hashable(cache_params)) if cache_params else uri_or_alias
         if cache_key in self._instances:
             return self._instances[cache_key]
         scheme = self._get_scheme(uri_or_alias)
-        if not scheme and
+        if not scheme and is_local_path(uri_or_alias):
             scheme = "file"
             uri_or_alias = f"file://{uri_or_alias}"

@@ -154,57 +142,105 @@
         self._instances[cache_key] = instance
         return instance

-    def _resolve_from_uri(
-
-
-
+    def _resolve_from_uri(self, uri: str, *, backend_override: str | None = None, **kwargs: Any) -> ObjectStoreProtocol:
+        """Resolve backend from URI with optional backend override.
+
+        Backend selection priority for local files (file:// or bare paths):
+        1. obstore (if installed) - provides async I/O performance
+        2. fsspec (if installed) - async wrapper fallback
+        3. local (always available) - zero-dependency sync backend
+
+        For cloud storage, prefer obstore over fsspec when available.
+
+        Args:
+            uri: Storage URI to resolve.
+            backend_override: Force specific backend type.
+            **kwargs: Additional backend configuration.
+
+        Returns:
+            Configured backend instance.
+
+        Raises:
+            MissingDependencyError: No backend available for URI scheme.
+        """
         if backend_override:
             return self._create_backend(backend_override, uri, **kwargs)
+
         scheme = self._get_scheme(uri)

-        #
+        # NEW: Prefer obstore for local files when available
         if scheme in {None, "file"}:
+            # Try obstore first for async performance
+            if OBSTORE_INSTALLED:
+                try:
+                    return self._create_backend("obstore", uri, **kwargs)
+                except (ValueError, ImportError, NotImplementedError):
+                    pass
+
+            # Fallback to fsspec if available
+            if FSSPEC_INSTALLED:
+                try:
+                    return self._create_backend("fsspec", uri, **kwargs)
+                except (ValueError, ImportError, NotImplementedError):
+                    pass
+
+            # Final fallback: local zero-dependency backend
             return self._create_backend("local", uri, **kwargs)

-        #
+        # For cloud schemes, prefer obstore over fsspec
         if scheme not in FSSPEC_ONLY_SCHEMES and OBSTORE_INSTALLED:
             try:
                 return self._create_backend("obstore", uri, **kwargs)
             except (ValueError, ImportError, NotImplementedError):
                 pass

-        # Try
+        # Try fsspec if available
         if FSSPEC_INSTALLED:
             try:
                 return self._create_backend("fsspec", uri, **kwargs)
             except (ValueError, ImportError, NotImplementedError):
                 pass

-        #
+        # No backend available
         msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec for cloud storage support."
         raise MissingDependencyError(msg)

     def _determine_backend_class(self, uri: str) -> type[ObjectStoreProtocol]:
-        """Determine the backend class for a URI based on availability.
+        """Determine the backend class for a URI based on availability.
+
+        Args:
+            uri: Storage URI to analyze.
+
+        Returns:
+            Backend class type to use.
+
+        Raises:
+            MissingDependencyError: No backend available for URI scheme.
+        """
         scheme = self._get_scheme(uri)

-        # For local files,
+        # NEW: For local files, prefer obstore > fsspec > local
         if scheme in {None, "file"}:
+            if OBSTORE_INSTALLED:
+                return self._get_backend_class("obstore")
+            if FSSPEC_INSTALLED:
+                return self._get_backend_class("fsspec")
             return self._get_backend_class("local")

         # FSSpec-only schemes require FSSpec
-        if scheme in FSSPEC_ONLY_SCHEMES
+        if scheme in FSSPEC_ONLY_SCHEMES:
+            if not FSSPEC_INSTALLED:
+                msg = f"Scheme '{scheme}' requires fsspec. Install with: pip install fsspec"
+                raise MissingDependencyError(msg)
             return self._get_backend_class("fsspec")

-        #
+        # For cloud schemes, prefer obstore
         if OBSTORE_INSTALLED:
             return self._get_backend_class("obstore")

-        # Fall back to FSSpec if available
         if FSSPEC_INSTALLED:
             return self._get_backend_class("fsspec")

-        # For cloud schemes without backends, provide helpful error
         msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec for cloud storage support."
         raise MissingDependencyError(msg)

@@ -229,7 +265,7 @@ class StorageRegistry:
         """Create backend instance for URI."""
         return self._get_backend_class(backend_type)(uri, **kwargs)

-    def _get_scheme(self, uri: str) ->
+    def _get_scheme(self, uri: str) -> str | None:
         """Extract the scheme from a URI using regex."""
         if not uri:
             return None
@@ -244,7 +280,7 @@ class StorageRegistry:
         """List all registered aliases."""
         return list(self._alias_configs.keys())

-    def clear_cache(self, uri_or_alias:
+    def clear_cache(self, uri_or_alias: str | None = None) -> None:
         """Clear resolved backend cache."""
         if uri_or_alias:
             self._instances.pop(uri_or_alias, None)
sqlspec/typing.py
CHANGED
@@ -1,9 +1,9 @@
 # pyright: ignore[reportAttributeAccessIssue]
-from collections.abc import Iterator
+from collections.abc import Iterator
 from functools import lru_cache
-from typing import
+from typing import Annotated, Any, Protocol, TypeAlias, _TypedDict # pyright: ignore

-from typing_extensions import
+from typing_extensions import TypeVar

 from sqlspec._typing import (
     AIOSQL_INSTALLED,
@@ -16,17 +16,22 @@ from sqlspec._typing import (
     OBSTORE_INSTALLED,
     OPENTELEMETRY_INSTALLED,
     ORJSON_INSTALLED,
+    PANDAS_INSTALLED,
     PGVECTOR_INSTALLED,
+    POLARS_INSTALLED,
     PROMETHEUS_INSTALLED,
     PYARROW_INSTALLED,
     PYDANTIC_INSTALLED,
     UNSET,
     AiosqlAsyncProtocol,
     AiosqlParamType,
-    AiosqlProtocol,
     AiosqlSQLOperationType,
     AiosqlSyncProtocol,
     ArrowRecordBatch,
+    ArrowRecordBatchReader,
+    ArrowRecordBatchReaderProtocol,
+    ArrowSchema,
+    ArrowSchemaProtocol,
     ArrowTable,
     AttrsInstance,
     AttrsInstanceStub,
@@ -41,6 +46,9 @@ from sqlspec._typing import (
     FailFast,
     Gauge,
     Histogram,
+    NumpyArray,
+    PandasDataFrame,
+    PolarsDataFrame,
     Span,
     Status,
     StatusCode,
@@ -61,9 +69,6 @@ from sqlspec._typing import (
     trace,
 )

-if TYPE_CHECKING:
-    from collections.abc import Sequence
-

 class DictLike(Protocol):
     """A protocol for objects that behave like a dictionary for reading."""
@@ -87,30 +92,22 @@ PoolT = TypeVar("PoolT")

 :class:`~sqlspec.typing.PoolT`
 """
-
-"""Type variable for
-
-:class:`~sqlspec.typing.PoolT_co`
-"""
-ModelT = TypeVar("ModelT", bound="Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]")
-"""Type variable for model types.
+SchemaT = TypeVar("SchemaT", default=dict[str, Any])
+"""Type variable for schema types (models, TypedDict, dataclasses, etc.).

-
+Unbounded TypeVar for use with schema_type parameter in driver methods.
+Supports all schema types including TypedDict which cannot be bounded to a class hierarchy.
 """
-RowT = TypeVar("RowT", bound="dict[str, Any]")
-

-DictRow: TypeAlias = "dict[str, Any]"
-"""Type variable for DictRow types."""
-TupleRow: TypeAlias = "tuple[Any, ...]"
-"""Type variable for TupleRow types."""

-SupportedSchemaModel: TypeAlias =
+SupportedSchemaModel: TypeAlias = (
+    DictLike | StructStub | BaseModelStub | DataclassProtocol | AttrsInstanceStub | _TypedDict
+)
 """Type alias for pydantic or msgspec models.

 :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`DataclassProtocol` | :class:`AttrsInstance`
 """
-StatementParameters: TypeAlias = "
+StatementParameters: TypeAlias = "Any | dict[str, Any] | list[Any] | tuple[Any, ...] | None"
 """Type alias for statement parameters.

 Represents:
@@ -119,40 +116,6 @@ Represents:
 - :type:`tuple[Any, ...]`
 - :type:`None`
 """
-ModelDTOT = TypeVar("ModelDTOT", bound="SupportedSchemaModel")
-"""Type variable for model DTOs.
-
-:class:`msgspec.Struct`|:class:`pydantic.BaseModel`
-"""
-PydanticOrMsgspecT = SupportedSchemaModel
-"""Type alias for pydantic or msgspec models.
-
-:class:`msgspec.Struct` or :class:`pydantic.BaseModel`
-"""
-ModelDict: TypeAlias = (
-    "Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub], Any]"
-)
-"""Type alias for model dictionaries.
-
-Represents:
-- :type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`
-"""
-ModelDictList: TypeAlias = (
-    "Sequence[Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]]]"
-)
-"""Type alias for model dictionary lists.
-
-A list or sequence of any of the following:
-- :type:`Sequence`[:type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`]
-
-"""
-BulkModelDict: TypeAlias = "Union[Sequence[Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]]], Any]"
-"""Type alias for bulk model dictionaries.
-
-Represents:
-- :type:`Sequence`[:type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`]
-- :class:`DTOData`[:type:`list[ModelT]`]
-"""


 @lru_cache(typed=True)
@@ -170,18 +133,6 @@ def get_type_adapter(f: "type[T]") -> Any:
     return TypeAdapter(f)


-def MixinOf(base: type[T]) -> type[T]: # noqa: N802
-    """Useful function to make mixins with baseclass type hint
-
-    ```
-    class StorageMixin(MixinOf(DriverProtocol)): ...
-    ```
-    """
-    if TYPE_CHECKING:
-        return base
-    return type("<MixinOf>", (base,), {})
-
-
 __all__ = (
     "AIOSQL_INSTALLED",
     "ATTRS_INSTALLED",
@@ -193,7 +144,9 @@ __all__ = (
     "OBSTORE_INSTALLED",
     "OPENTELEMETRY_INSTALLED",
     "ORJSON_INSTALLED",
+    "PANDAS_INSTALLED",
     "PGVECTOR_INSTALLED",
+    "POLARS_INSTALLED",
     "PROMETHEUS_INSTALLED",
     "PYARROW_INSTALLED",
     "PYDANTIC_INSTALLED",
@@ -201,38 +154,32 @@ __all__ = (
     "UNSET",
     "AiosqlAsyncProtocol",
     "AiosqlParamType",
-    "AiosqlProtocol",
     "AiosqlSQLOperationType",
     "AiosqlSyncProtocol",
     "ArrowRecordBatch",
+    "ArrowRecordBatchReader",
+    "ArrowRecordBatchReaderProtocol",
+    "ArrowSchema",
+    "ArrowSchemaProtocol",
     "ArrowTable",
     "AttrsInstance",
     "BaseModel",
-    "BulkModelDict",
     "ConnectionT",
     "Counter",
     "DTOData",
     "DataclassProtocol",
     "DictLike",
-    "DictRow",
     "Empty",
     "EmptyEnum",
     "EmptyType",
     "FailFast",
     "Gauge",
     "Histogram",
-    "
-    "
-    "
-    "ModelDict",
-    "ModelDict",
-    "ModelDictList",
-    "ModelDictList",
-    "ModelT",
+    "NumpyArray",
+    "PandasDataFrame",
+    "PolarsDataFrame",
     "PoolT",
-    "
-    "PydanticOrMsgspecT",
-    "RowT",
+    "SchemaT",
     "Span",
     "StatementParameters",
     "Status",
@@ -240,7 +187,6 @@ __all__ = (
     "Struct",
     "SupportedSchemaModel",
     "Tracer",
-    "TupleRow",
     "TypeAdapter",
     "UnsetType",
     "aiosql",
sqlspec/utils/__init__.py
CHANGED
@@ -1,10 +1,31 @@
 """Utility functions and classes for SQLSpec.

 This package provides various utility modules for deprecation handling,
-fixture loading, logging, module loading
-
+fixture loading, logging, module loading (including dependency checking),
+portal pattern for async bridging, singleton patterns, sync/async conversion,
+text processing, and type guards.
 """

-from sqlspec.utils import
+from sqlspec.utils import (
+    deprecation,
+    fixtures,
+    logging,
+    module_loader,
+    portal,
+    singleton,
+    sync_tools,
+    text,
+    type_guards,
+)

-__all__ = (
+__all__ = (
+    "deprecation",
+    "fixtures",
+    "logging",
+    "module_loader",
+    "portal",
+    "singleton",
+    "sync_tools",
+    "text",
+    "type_guards",
+)

sqlspec/utils/arrow_helpers.py
ADDED
@@ -0,0 +1,81 @@
+"""Arrow conversion helpers for dict-to-Arrow transformations.
+
+This module provides utilities for converting Python dictionaries to Apache Arrow
+format, handling empty results, NULL values, and type inference.
+"""
+
+from typing import TYPE_CHECKING, Any, Literal
+
+if TYPE_CHECKING:
+    from sqlspec.typing import ArrowRecordBatch, ArrowTable
+
+__all__ = ("convert_dict_to_arrow",)
+
+
+def convert_dict_to_arrow(
+    data: "list[dict[str, Any]]",
+    return_format: Literal["table", "reader", "batches"] = "table",
+    batch_size: int | None = None,
+) -> "ArrowTable | ArrowRecordBatch":
+    """Convert list of dictionaries to Arrow Table or RecordBatch.
+
+    Handles empty results, NULL values, and automatic type inference.
+    Used by adapters that don't have native Arrow support to convert
+    dict-based results to Arrow format.
+
+    Args:
+        data: List of dictionaries (one per row).
+        return_format: Output format - "table" for Table, "batches" for RecordBatch.
+            "reader" is converted to "table" (streaming handled at driver level).
+        batch_size: Chunk size for batching (used when return_format="batches").
+
+    Returns:
+        ArrowTable or ArrowRecordBatch depending on return_format.
+
+
+    Examples:
+        >>> data = [
+        ...     {"id": 1, "name": "Alice"},
+        ...     {"id": 2, "name": "Bob"},
+        ... ]
+        >>> table = convert_dict_to_arrow(data, return_format="table")
+        >>> print(table.num_rows)
+        2
+
+        >>> batch = convert_dict_to_arrow(data, return_format="batch")
+        >>> print(batch.num_rows)
+        2
+    """
+    from sqlspec.utils.module_loader import ensure_pyarrow
+
+    ensure_pyarrow()
+
+    import pyarrow as pa
+
+    # Handle empty results
+    if not data:
+        empty_schema = pa.schema([])
+        empty_table = pa.Table.from_pydict({}, schema=empty_schema)
+
+        if return_format == "batches":
+            # Create empty RecordBatch
+            batches = empty_table.to_batches()
+            return batches[0] if batches else pa.RecordBatch.from_pydict({})
+
+        return empty_table
+
+    # Convert list of dicts to columnar format
+    # This is more efficient than row-by-row conversion
+    columns: dict[str, list[Any]] = {key: [row.get(key) for row in data] for key in data[0]}
+
+    # Create Arrow Table (auto type inference)
+    arrow_table = pa.Table.from_pydict(columns)
+
+    # Return appropriate format
+    if return_format == "batches":
+        # Convert table to single RecordBatch
+        batches = arrow_table.to_batches()
+        return batches[0] if batches else pa.RecordBatch.from_pydict({})
+
+    # return_format == "table" or "reader" (reader handled at driver level)
+    return arrow_table
sqlspec/utils/config_resolver.py
CHANGED
@@ -7,7 +7,7 @@ Supports both synchronous and asynchronous callable functions.

 import inspect
 from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, cast

 from sqlspec.exceptions import ConfigResolverError
 from sqlspec.utils.module_loader import import_string
@@ -21,7 +21,7 @@ __all__ = ("resolve_config_async", "resolve_config_sync")

 async def resolve_config_async(
     config_path: str,
-) -> "
+) -> "list[AsyncDatabaseConfig | SyncDatabaseConfig] | AsyncDatabaseConfig | SyncDatabaseConfig":
     """Resolve config from dotted path, handling callables and direct instances.

     This is the async-first version that handles both sync and async callables efficiently.
@@ -58,7 +58,7 @@ async def resolve_config_async(

 def resolve_config_sync(
     config_path: str,
-) -> "
+) -> "list[AsyncDatabaseConfig | SyncDatabaseConfig] | AsyncDatabaseConfig | SyncDatabaseConfig":
     """Synchronous wrapper for resolve_config.

     Args:
@@ -90,7 +90,7 @@ def resolve_config_sync(

 def _validate_config_result(
     config_result: Any, config_path: str
-) -> "
+) -> "list[AsyncDatabaseConfig | SyncDatabaseConfig] | AsyncDatabaseConfig | SyncDatabaseConfig":
     """Validate that the config result is a valid config or list of configs.

     Args:
@@ -117,13 +117,13 @@ def _validate_config_result(
             msg = f"Config '{config_path}' returned invalid config at index {i}. Expected database config instance."
             raise ConfigResolverError(msg)

-        return cast("list[
+        return cast("list[AsyncDatabaseConfig | SyncDatabaseConfig]", list(config_result))

     if not _is_valid_config(config_result):
         msg = f"Config '{config_path}' returned invalid type '{type(config_result).__name__}'. Expected database config instance or list."
         raise ConfigResolverError(msg)

-    return cast("
+    return cast("AsyncDatabaseConfig | SyncDatabaseConfig", config_result)


 def _is_valid_config(config: Any) -> bool:
sqlspec/utils/correlation.py
CHANGED
@@ -4,15 +4,14 @@ This module provides utilities for tracking correlation IDs across
 database operations, enabling distributed tracing and debugging.
 """

-from __future__ import annotations
-
 import uuid
+from collections.abc import Generator
 from contextlib import contextmanager
 from contextvars import ContextVar
 from typing import TYPE_CHECKING, Any

 if TYPE_CHECKING:
-    from collections.abc import
+    from collections.abc import MutableMapping
     from logging import LoggerAdapter

 __all__ = ("CorrelationContext", "correlation_context", "get_correlation_adapter")
@@ -115,7 +114,7 @@ def correlation_context(correlation_id: str | None = None) -> Generator[str, Non
     yield cid


-def get_correlation_adapter(logger: Any) -> LoggerAdapter:
+def get_correlation_adapter(logger: Any) -> "LoggerAdapter":
     """Get a logger adapter that automatically includes correlation ID.

     Args:
@@ -129,7 +128,7 @@ def get_correlation_adapter(logger: Any) -> LoggerAdapter:
     class CorrelationAdapter(LoggerAdapter):
         """Logger adapter that adds correlation ID to all logs."""

-        def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, dict[str, Any]]:
+        def process(self, msg: str, kwargs: "MutableMapping[str, Any]") -> tuple[str, dict[str, Any]]:
            """Add correlation ID to the log record.

            Args:
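
A minimal sketch of the correlation utilities touched above, using only the names in the module's __all__; the logger name and message are arbitrary examples:

import logging

from sqlspec.utils.correlation import correlation_context, get_correlation_adapter

log = get_correlation_adapter(logging.getLogger("sqlspec.demo"))

# correlation_context() yields the active correlation ID as a string
# (generated when none is supplied), and the adapter attaches it to log records.
with correlation_context() as correlation_id:
    log.info("running query under correlation id %s", correlation_id)
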
sqlspec/utils/data_transformation.py
CHANGED
@@ -5,7 +5,8 @@ field name conversion when mapping database results to schema objects.
 Used primarily for msgspec field name conversion with rename configurations.
 """

-from
+from collections.abc import Callable
+from typing import Any

 __all__ = ("transform_dict_keys",)

@@ -30,7 +31,7 @@ def _safe_convert_key(key: Any, converter: Callable[[str], str]) -> Any:
     return key


-def transform_dict_keys(data:
+def transform_dict_keys(data: dict | list | Any, converter: Callable[[str], str]) -> dict | list | Any:
     """Transform dictionary keys using the provided converter function.

     Recursively transforms all dictionary keys in a data structure using