sqlspec 0.16.1 (cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sqlspec might be problematic.

Files changed (148)
  1. 51ff5a9eadfdefd49f98__mypyc.cpython-310-aarch64-linux-gnu.so +0 -0
  2. sqlspec/__init__.py +92 -0
  3. sqlspec/__main__.py +12 -0
  4. sqlspec/__metadata__.py +14 -0
  5. sqlspec/_serialization.py +77 -0
  6. sqlspec/_sql.py +1780 -0
  7. sqlspec/_typing.py +680 -0
  8. sqlspec/adapters/__init__.py +0 -0
  9. sqlspec/adapters/adbc/__init__.py +5 -0
  10. sqlspec/adapters/adbc/_types.py +12 -0
  11. sqlspec/adapters/adbc/config.py +361 -0
  12. sqlspec/adapters/adbc/driver.py +512 -0
  13. sqlspec/adapters/aiosqlite/__init__.py +19 -0
  14. sqlspec/adapters/aiosqlite/_types.py +13 -0
  15. sqlspec/adapters/aiosqlite/config.py +253 -0
  16. sqlspec/adapters/aiosqlite/driver.py +248 -0
  17. sqlspec/adapters/asyncmy/__init__.py +19 -0
  18. sqlspec/adapters/asyncmy/_types.py +12 -0
  19. sqlspec/adapters/asyncmy/config.py +180 -0
  20. sqlspec/adapters/asyncmy/driver.py +274 -0
  21. sqlspec/adapters/asyncpg/__init__.py +21 -0
  22. sqlspec/adapters/asyncpg/_types.py +17 -0
  23. sqlspec/adapters/asyncpg/config.py +229 -0
  24. sqlspec/adapters/asyncpg/driver.py +344 -0
  25. sqlspec/adapters/bigquery/__init__.py +18 -0
  26. sqlspec/adapters/bigquery/_types.py +12 -0
  27. sqlspec/adapters/bigquery/config.py +298 -0
  28. sqlspec/adapters/bigquery/driver.py +558 -0
  29. sqlspec/adapters/duckdb/__init__.py +22 -0
  30. sqlspec/adapters/duckdb/_types.py +12 -0
  31. sqlspec/adapters/duckdb/config.py +504 -0
  32. sqlspec/adapters/duckdb/driver.py +368 -0
  33. sqlspec/adapters/oracledb/__init__.py +32 -0
  34. sqlspec/adapters/oracledb/_types.py +14 -0
  35. sqlspec/adapters/oracledb/config.py +317 -0
  36. sqlspec/adapters/oracledb/driver.py +538 -0
  37. sqlspec/adapters/psqlpy/__init__.py +16 -0
  38. sqlspec/adapters/psqlpy/_types.py +11 -0
  39. sqlspec/adapters/psqlpy/config.py +214 -0
  40. sqlspec/adapters/psqlpy/driver.py +530 -0
  41. sqlspec/adapters/psycopg/__init__.py +32 -0
  42. sqlspec/adapters/psycopg/_types.py +17 -0
  43. sqlspec/adapters/psycopg/config.py +426 -0
  44. sqlspec/adapters/psycopg/driver.py +796 -0
  45. sqlspec/adapters/sqlite/__init__.py +15 -0
  46. sqlspec/adapters/sqlite/_types.py +11 -0
  47. sqlspec/adapters/sqlite/config.py +240 -0
  48. sqlspec/adapters/sqlite/driver.py +294 -0
  49. sqlspec/base.py +571 -0
  50. sqlspec/builder/__init__.py +62 -0
  51. sqlspec/builder/_base.py +473 -0
  52. sqlspec/builder/_column.py +320 -0
  53. sqlspec/builder/_ddl.py +1346 -0
  54. sqlspec/builder/_ddl_utils.py +103 -0
  55. sqlspec/builder/_delete.py +76 -0
  56. sqlspec/builder/_insert.py +256 -0
  57. sqlspec/builder/_merge.py +71 -0
  58. sqlspec/builder/_parsing_utils.py +140 -0
  59. sqlspec/builder/_select.py +170 -0
  60. sqlspec/builder/_update.py +188 -0
  61. sqlspec/builder/mixins/__init__.py +55 -0
  62. sqlspec/builder/mixins/_cte_and_set_ops.py +222 -0
  63. sqlspec/builder/mixins/_delete_operations.py +41 -0
  64. sqlspec/builder/mixins/_insert_operations.py +244 -0
  65. sqlspec/builder/mixins/_join_operations.py +122 -0
  66. sqlspec/builder/mixins/_merge_operations.py +476 -0
  67. sqlspec/builder/mixins/_order_limit_operations.py +135 -0
  68. sqlspec/builder/mixins/_pivot_operations.py +153 -0
  69. sqlspec/builder/mixins/_select_operations.py +603 -0
  70. sqlspec/builder/mixins/_update_operations.py +187 -0
  71. sqlspec/builder/mixins/_where_clause.py +621 -0
  72. sqlspec/cli.py +247 -0
  73. sqlspec/config.py +395 -0
  74. sqlspec/core/__init__.py +63 -0
  75. sqlspec/core/cache.cpython-310-aarch64-linux-gnu.so +0 -0
  76. sqlspec/core/cache.py +871 -0
  77. sqlspec/core/compiler.cpython-310-aarch64-linux-gnu.so +0 -0
  78. sqlspec/core/compiler.py +417 -0
  79. sqlspec/core/filters.cpython-310-aarch64-linux-gnu.so +0 -0
  80. sqlspec/core/filters.py +830 -0
  81. sqlspec/core/hashing.cpython-310-aarch64-linux-gnu.so +0 -0
  82. sqlspec/core/hashing.py +310 -0
  83. sqlspec/core/parameters.cpython-310-aarch64-linux-gnu.so +0 -0
  84. sqlspec/core/parameters.py +1237 -0
  85. sqlspec/core/result.cpython-310-aarch64-linux-gnu.so +0 -0
  86. sqlspec/core/result.py +677 -0
  87. sqlspec/core/splitter.cpython-310-aarch64-linux-gnu.so +0 -0
  88. sqlspec/core/splitter.py +819 -0
  89. sqlspec/core/statement.cpython-310-aarch64-linux-gnu.so +0 -0
  90. sqlspec/core/statement.py +676 -0
  91. sqlspec/driver/__init__.py +19 -0
  92. sqlspec/driver/_async.py +502 -0
  93. sqlspec/driver/_common.py +631 -0
  94. sqlspec/driver/_sync.py +503 -0
  95. sqlspec/driver/mixins/__init__.py +6 -0
  96. sqlspec/driver/mixins/_result_tools.py +193 -0
  97. sqlspec/driver/mixins/_sql_translator.py +86 -0
  98. sqlspec/exceptions.py +193 -0
  99. sqlspec/extensions/__init__.py +0 -0
  100. sqlspec/extensions/aiosql/__init__.py +10 -0
  101. sqlspec/extensions/aiosql/adapter.py +461 -0
  102. sqlspec/extensions/litestar/__init__.py +6 -0
  103. sqlspec/extensions/litestar/_utils.py +52 -0
  104. sqlspec/extensions/litestar/cli.py +48 -0
  105. sqlspec/extensions/litestar/config.py +92 -0
  106. sqlspec/extensions/litestar/handlers.py +260 -0
  107. sqlspec/extensions/litestar/plugin.py +145 -0
  108. sqlspec/extensions/litestar/providers.py +454 -0
  109. sqlspec/loader.cpython-310-aarch64-linux-gnu.so +0 -0
  110. sqlspec/loader.py +760 -0
  111. sqlspec/migrations/__init__.py +35 -0
  112. sqlspec/migrations/base.py +414 -0
  113. sqlspec/migrations/commands.py +443 -0
  114. sqlspec/migrations/loaders.py +402 -0
  115. sqlspec/migrations/runner.py +213 -0
  116. sqlspec/migrations/tracker.py +140 -0
  117. sqlspec/migrations/utils.py +129 -0
  118. sqlspec/protocols.py +407 -0
  119. sqlspec/py.typed +0 -0
  120. sqlspec/storage/__init__.py +23 -0
  121. sqlspec/storage/backends/__init__.py +0 -0
  122. sqlspec/storage/backends/base.py +163 -0
  123. sqlspec/storage/backends/fsspec.py +386 -0
  124. sqlspec/storage/backends/obstore.py +459 -0
  125. sqlspec/storage/capabilities.py +102 -0
  126. sqlspec/storage/registry.py +239 -0
  127. sqlspec/typing.py +299 -0
  128. sqlspec/utils/__init__.py +3 -0
  129. sqlspec/utils/correlation.py +150 -0
  130. sqlspec/utils/deprecation.py +106 -0
  131. sqlspec/utils/fixtures.cpython-310-aarch64-linux-gnu.so +0 -0
  132. sqlspec/utils/fixtures.py +58 -0
  133. sqlspec/utils/logging.py +127 -0
  134. sqlspec/utils/module_loader.py +89 -0
  135. sqlspec/utils/serializers.py +4 -0
  136. sqlspec/utils/singleton.py +32 -0
  137. sqlspec/utils/sync_tools.cpython-310-aarch64-linux-gnu.so +0 -0
  138. sqlspec/utils/sync_tools.py +237 -0
  139. sqlspec/utils/text.cpython-310-aarch64-linux-gnu.so +0 -0
  140. sqlspec/utils/text.py +96 -0
  141. sqlspec/utils/type_guards.cpython-310-aarch64-linux-gnu.so +0 -0
  142. sqlspec/utils/type_guards.py +1139 -0
  143. sqlspec-0.16.1.dist-info/METADATA +365 -0
  144. sqlspec-0.16.1.dist-info/RECORD +148 -0
  145. sqlspec-0.16.1.dist-info/WHEEL +7 -0
  146. sqlspec-0.16.1.dist-info/entry_points.txt +2 -0
  147. sqlspec-0.16.1.dist-info/licenses/LICENSE +21 -0
  148. sqlspec-0.16.1.dist-info/licenses/NOTICE +29 -0
@@ -0,0 +1,239 @@
+ """Unified Storage Registry for ObjectStore backends.
+
+ Provides a flexible, lazy-loading storage registry that supports URI-first access
+ pattern with automatic backend detection, ObStore preferred with FSSpec fallback,
+ intelligent scheme-based routing, and named aliases for common configurations.
+ """
+
+ import logging
+ import re
+ from pathlib import Path
+ from typing import Any, Final, Optional, Union, cast
+
+ from mypy_extensions import mypyc_attr
+
+ from sqlspec.exceptions import ImproperConfigurationError, MissingDependencyError
+ from sqlspec.protocols import ObjectStoreProtocol
+ from sqlspec.storage.capabilities import StorageCapabilities
+ from sqlspec.typing import FSSPEC_INSTALLED, OBSTORE_INSTALLED
+
+ __all__ = ("StorageRegistry", "storage_registry")
+
+ logger = logging.getLogger(__name__)
+
+
+ SCHEME_REGEX: Final = re.compile(r"([a-zA-Z0-9+.-]+)://")
+ FILE_PROTOCOL: Final[str] = "file"
+ S3_PROTOCOL: Final[str] = "s3"
+ GCS_PROTOCOL: Final[str] = "gs"
+ AZURE_PROTOCOL: Final[str] = "az"
+ FSSPEC_ONLY_SCHEMES: Final[frozenset[str]] = frozenset({"http", "https", "ftp", "sftp", "ssh"})
+
+
+ @mypyc_attr(allow_interpreted_subclasses=True)
+ class StorageRegistry:
+     """Storage registry with URI-first access and automatic backend selection.
+
+     Provides URI-first access pattern with automatic backend selection.
+     Named aliases support complex configurations.
+
+     Examples:
+         backend = registry.get("s3://my-bucket/file.parquet")
+         backend = registry.get("file:///tmp/data.csv")
+         backend = registry.get("gs://bucket/data.json")
+
+         registry.register_alias(
+             "production-s3",
+             uri="s3://prod-bucket/data",
+             base_path="sqlspec",
+             aws_access_key_id="...",
+             aws_secret_access_key="..."
+         )
+         backend = registry.get("production-s3")
+     """
+
+     __slots__ = ("_alias_configs", "_aliases", "_cache", "_instances")
+
+     def __init__(self) -> None:
+         self._alias_configs: dict[str, tuple[type[ObjectStoreProtocol], str, dict[str, Any]]] = {}
+         self._aliases: dict[str, dict[str, Any]] = {}
+         self._instances: dict[Union[str, tuple[str, tuple[tuple[str, Any], ...]]], ObjectStoreProtocol] = {}
+         self._cache: dict[str, tuple[str, type[ObjectStoreProtocol]]] = {}
+
+     def register_alias(
+         self,
+         alias: str,
+         uri: str,
+         *,
+         backend: Optional[type[ObjectStoreProtocol]] = None,
+         base_path: str = "",
+         config: Optional[dict[str, Any]] = None,
+         **kwargs: Any,
+     ) -> None:
+         """Register a named alias for a storage configuration.
+
+         Args:
+             alias: Unique alias name for the configuration
+             uri: Storage URI (e.g., "s3://bucket", "file:///path")
+             backend: Backend class to use (auto-detected from URI if not provided)
+             base_path: Base path to prepend to all operations
+             config: Additional configuration dict
+             **kwargs: Backend-specific configuration options
+         """
+         if backend is None:
+             backend = self._determine_backend_class(uri)
+
+         config = config or {}
+         config.update(kwargs)
+         backend_config = dict(config)
+         if base_path:
+             backend_config["base_path"] = base_path
+         self._alias_configs[alias] = (backend, uri, backend_config)
+         test_config = dict(backend_config)
+         test_config["uri"] = uri
+         self._aliases[alias] = test_config
+
+     def get(self, uri_or_alias: Union[str, Path], **kwargs: Any) -> ObjectStoreProtocol:
+         """Get backend instance using URI-first routing with automatic backend selection.
+
+         Args:
+             uri_or_alias: URI to resolve directly OR named alias
+             **kwargs: Additional backend-specific configuration options
+
+         Returns:
+             Backend instance with automatic backend selection
+
+         Raises:
+             ImproperConfigurationError: If alias not found or invalid input
+         """
+         if not uri_or_alias:
+             msg = "URI or alias cannot be empty."
+             raise ImproperConfigurationError(msg)
+
+         if isinstance(uri_or_alias, Path):
+             uri_or_alias = f"file://{uri_or_alias.resolve()}"
+
+         cache_key = (uri_or_alias, tuple(sorted(kwargs.items()))) if kwargs else uri_or_alias
+         if cache_key in self._instances:
+             return self._instances[cache_key]
+         scheme = self._get_scheme(uri_or_alias)
+         if not scheme and (
+             Path(uri_or_alias).exists()
+             or Path(uri_or_alias).is_absolute()
+             or uri_or_alias.startswith(("~", "."))
+             or ":\\" in uri_or_alias
+             or "/" in uri_or_alias
+         ):
+             scheme = "file"
+             uri_or_alias = f"file://{uri_or_alias}"
+
+         if scheme:
+             instance = self._resolve_from_uri(uri_or_alias, **kwargs)
+         elif uri_or_alias in self._alias_configs:
+             backend_cls, stored_uri, config = self._alias_configs[uri_or_alias]
+             instance = backend_cls(stored_uri, **{**config, **kwargs})
+         else:
+             msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'"
+             raise ImproperConfigurationError(msg)
+         self._instances[cache_key] = instance
+         return instance
+
+     def _resolve_from_uri(self, uri: str, **kwargs: Any) -> ObjectStoreProtocol:
+         """Resolve backend from URI, trying ObStore first, then FSSpec."""
+         scheme = self._get_scheme(uri)
+         if scheme not in FSSPEC_ONLY_SCHEMES and OBSTORE_INSTALLED:
+             try:
+                 return self._create_backend("obstore", uri, **kwargs)
+             except (ValueError, ImportError, NotImplementedError):
+                 pass
+         if FSSPEC_INSTALLED:
+             try:
+                 return self._create_backend("fsspec", uri, **kwargs)
+             except (ValueError, ImportError, NotImplementedError):
+                 pass
+         msg = f"No storage backend available for scheme '{scheme}'. Install obstore or fsspec."
+         raise MissingDependencyError(msg)
+
+     def _determine_backend_class(self, uri: str) -> type[ObjectStoreProtocol]:
+         """Determine the backend class for a URI based on availability."""
+         scheme = self._get_scheme(uri)
+         if scheme in FSSPEC_ONLY_SCHEMES and FSSPEC_INSTALLED:
+             return self._get_backend_class("fsspec")
+         if OBSTORE_INSTALLED:
+             return self._get_backend_class("obstore")
+         if FSSPEC_INSTALLED:
+             return self._get_backend_class("fsspec")
+         msg = f"No backend available for URI scheme '{scheme}'. Install obstore or fsspec."
+         raise MissingDependencyError(msg)
+
+     def _get_backend_class(self, backend_type: str) -> type[ObjectStoreProtocol]:
+         """Get backend class by type name."""
+         if backend_type == "obstore":
+             from sqlspec.storage.backends.obstore import ObStoreBackend
+
+             return cast("type[ObjectStoreProtocol]", ObStoreBackend)
+         if backend_type == "fsspec":
+             from sqlspec.storage.backends.fsspec import FSSpecBackend
+
+             return cast("type[ObjectStoreProtocol]", FSSpecBackend)
+         msg = f"Unknown backend type: {backend_type}. Supported types: 'obstore', 'fsspec'"
+         raise ValueError(msg)
+
+     def _create_backend(self, backend_type: str, uri: str, **kwargs: Any) -> ObjectStoreProtocol:
+         """Create backend instance for URI."""
+         return self._get_backend_class(backend_type)(uri, **kwargs)
+
+     def _get_scheme(self, uri: str) -> Optional[str]:
+         """Extract the scheme from a URI using regex."""
+         if not uri:
+             return None
+         match = SCHEME_REGEX.match(uri)
+         return match.group(1).lower() if match else None
+
+     def is_alias_registered(self, alias: str) -> bool:
+         """Check if a named alias is registered."""
+         return alias in self._alias_configs
+
+     def list_aliases(self) -> list[str]:
+         """List all registered aliases."""
+         return list(self._alias_configs.keys())
+
+     def clear_cache(self, uri_or_alias: Optional[str] = None) -> None:
+         """Clear resolved backend cache."""
+         if uri_or_alias:
+             self._instances.pop(uri_or_alias, None)
+         else:
+             self._instances.clear()
+
+     def clear(self) -> None:
+         """Clear all aliases and instances."""
+         self._alias_configs.clear()
+         self._aliases.clear()
+         self._instances.clear()
+
+     def clear_instances(self) -> None:
+         """Clear only cached instances, keeping aliases."""
+         self._instances.clear()
+
+     def clear_aliases(self) -> None:
+         """Clear only aliases, keeping cached instances."""
+         self._alias_configs.clear()
+         self._aliases.clear()
+
+     def get_backend_capabilities(self, uri_or_alias: Union[str, Path]) -> "StorageCapabilities":
+         """Get capabilities for a backend without creating an instance."""
+         if isinstance(uri_or_alias, Path):
+             uri_or_alias = f"file://{uri_or_alias.resolve()}"
+         if "://" in uri_or_alias:
+             backend_cls = self._determine_backend_class(uri_or_alias)
+         elif uri_or_alias in self._alias_configs:
+             backend_cls, _, _ = self._alias_configs[uri_or_alias]
+         else:
+             msg = f"Unknown storage alias or invalid URI: '{uri_or_alias}'"
+             raise ImproperConfigurationError(msg)
+         if hasattr(backend_cls, "capabilities"):
+             return backend_cls.capabilities
+         return StorageCapabilities()
+
+
+ storage_registry = StorageRegistry()
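For orientation when reviewing this file, here is a minimal usage sketch of the registry above. The bucket names, alias, and credential values are placeholders (they mirror the class docstring), and it assumes obstore or fsspec is installed so a backend can actually be resolved:

```python
from sqlspec.storage.registry import storage_registry

# URI-first access: the scheme ("s3") drives backend selection,
# preferring ObStore and falling back to FSSpec.
backend = storage_registry.get("s3://example-bucket/data/file.parquet")

# Named alias for a richer configuration (placeholder credentials).
storage_registry.register_alias(
    "production-s3",
    uri="s3://prod-bucket/data",
    base_path="sqlspec",
    aws_access_key_id="...",
    aws_secret_access_key="...",
)
aliased = storage_registry.get("production-s3")

# Lookups with the same URI/alias and kwargs return the cached instance.
assert storage_registry.get("production-s3") is aliased
```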
sqlspec/typing.py ADDED
@@ -0,0 +1,299 @@
+ # pyright: ignore[reportAttributeAccessIssue]
+ from collections.abc import Iterator, Mapping
+ from functools import lru_cache
+ from typing import TYPE_CHECKING, Annotated, Any, Protocol, Union
+
+ from typing_extensions import TypeAlias, TypeVar
+
+ from sqlspec._typing import (
+     AIOSQL_INSTALLED,
+     ATTRS_INSTALLED,
+     CATTRS_INSTALLED,
+     FSSPEC_INSTALLED,
+     LITESTAR_INSTALLED,
+     MSGSPEC_INSTALLED,
+     OBSTORE_INSTALLED,
+     OPENTELEMETRY_INSTALLED,
+     PGVECTOR_INSTALLED,
+     PROMETHEUS_INSTALLED,
+     PYARROW_INSTALLED,
+     PYDANTIC_INSTALLED,
+     DataclassProtocol,
+     Empty,
+     EmptyEnum,
+     EmptyType,
+ )
+
+ if TYPE_CHECKING:
+     from collections.abc import Sequence
+
+     from sqlspec._typing import (
+         UNSET,
+         AiosqlAsyncProtocol,
+         AiosqlParamType,
+         AiosqlProtocol,
+         AiosqlSQLOperationType,
+         AiosqlSyncProtocol,
+         ArrowRecordBatch,
+         ArrowTable,
+         AttrsInstance,
+         AttrsInstanceStub,
+         BaseModel,
+         BaseModelStub,
+         Counter,
+         DTOData,
+         FailFast,
+         Gauge,
+         Histogram,
+         Span,
+         Status,
+         StatusCode,
+         Struct,
+         StructStub,
+         Tracer,
+         TypeAdapter,
+         UnsetType,
+         aiosql,
+         attrs_asdict,
+         attrs_define,
+         attrs_field,
+         attrs_fields,
+         attrs_has,
+         cattrs_structure,
+         cattrs_unstructure,
+         convert,
+         trace,
+     )
+ else:
+     from sqlspec._typing import (
+         UNSET,
+         AiosqlAsyncProtocol,
+         AiosqlParamType,
+         AiosqlProtocol,
+         AiosqlSQLOperationType,
+         AiosqlSyncProtocol,
+         ArrowRecordBatch,
+         ArrowTable,
+         AttrsInstance,
+         BaseModel,
+         Counter,
+         DTOData,
+         FailFast,
+         Gauge,
+         Histogram,
+         Span,
+         Status,
+         StatusCode,
+         Struct,
+         Tracer,
+         TypeAdapter,
+         UnsetType,
+         aiosql,
+         attrs_asdict,
+         attrs_define,
+         attrs_field,
+         attrs_fields,
+         attrs_has,
+         cattrs_structure,
+         cattrs_unstructure,
+         convert,
+         trace,
+     )
+
+
+ class DictLike(Protocol):
+     """A protocol for objects that behave like a dictionary for reading."""
+
+     def __getitem__(self, key: str) -> Any: ...
+     def __iter__(self) -> Iterator[str]: ...
+     def __len__(self) -> int: ...
+
+
+ PYDANTIC_USE_FAILFAST = False
+
+
+ if TYPE_CHECKING:
+     T = TypeVar("T")
+     ConnectionT = TypeVar("ConnectionT")
+     """Type variable for connection types.
+
+     :class:`~sqlspec.typing.ConnectionT`
+     """
+     PoolT = TypeVar("PoolT")
+     """Type variable for pool types.
+
+     :class:`~sqlspec.typing.PoolT`
+     """
+     PoolT_co = TypeVar("PoolT_co", covariant=True)
+     """Type variable for covariant pool types.
+
+     :class:`~sqlspec.typing.PoolT_co`
+     """
+     ModelT = TypeVar("ModelT", bound="Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]")
+     """Type variable for model types.
+
+     :class:`DictLike` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`DataclassProtocol` | :class:`AttrsInstance`
+     """
+     RowT = TypeVar("RowT", bound="dict[str, Any]")
+ else:
+     T = Any
+     ConnectionT = Any
+     PoolT = Any
+     PoolT_co = Any
+     ModelT = Any
+     RowT = dict[str, Any]
+
+
+ DictRow: TypeAlias = "dict[str, Any]"
+ """Type variable for DictRow types."""
+ TupleRow: TypeAlias = "tuple[Any, ...]"
+ """Type variable for TupleRow types."""
+
+ SupportedSchemaModel: TypeAlias = "Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]"
+ """Type alias for pydantic or msgspec models.
+
+ :class:`msgspec.Struct` | :class:`pydantic.BaseModel` | :class:`DataclassProtocol` | :class:`AttrsInstance`
+ """
+ StatementParameters: TypeAlias = "Union[Any, dict[str, Any], list[Any], tuple[Any, ...], None]"
+ """Type alias for statement parameters.
+
+ Represents:
+ - :type:`dict[str, Any]`
+ - :type:`list[Any]`
+ - :type:`tuple[Any, ...]`
+ - :type:`None`
+ """
+ ModelDTOT = TypeVar("ModelDTOT", bound="SupportedSchemaModel")
+ """Type variable for model DTOs.
+
+ :class:`msgspec.Struct`|:class:`pydantic.BaseModel`
+ """
+ PydanticOrMsgspecT = SupportedSchemaModel
+ """Type alias for pydantic or msgspec models.
+
+ :class:`msgspec.Struct` or :class:`pydantic.BaseModel`
+ """
+ ModelDict: TypeAlias = (
+     "Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub], Any]"
+ )
+ """Type alias for model dictionaries.
+
+ Represents:
+ - :type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`
+ """
+ ModelDictList: TypeAlias = (
+     "Sequence[Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]]]"
+ )
+ """Type alias for model dictionary lists.
+
+ A list or sequence of any of the following:
+ - :type:`Sequence`[:type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`]
+
+ """
+ BulkModelDict: TypeAlias = "Union[Sequence[Union[dict[str, Any], Union[DictLike, StructStub, BaseModelStub, DataclassProtocol, AttrsInstanceStub]]], Any]"
+ """Type alias for bulk model dictionaries.
+
+ Represents:
+ - :type:`Sequence`[:type:`dict[str, Any]` | :class:`DataclassProtocol` | :class:`msgspec.Struct` | :class:`pydantic.BaseModel`]
+ - :class:`DTOData`[:type:`list[ModelT]`]
+ """
+
+
+ @lru_cache(typed=True)
+ def get_type_adapter(f: "type[T]") -> Any:
+     """Caches and returns a pydantic type adapter.
+
+     Args:
+         f: Type to create a type adapter for.
+
+     Returns:
+         :class:`pydantic.TypeAdapter`[:class:`typing.TypeVar`[T]]
+     """
+     if PYDANTIC_USE_FAILFAST:
+         return TypeAdapter(Annotated[f, FailFast()])
+     return TypeAdapter(f)
+
+
+ def MixinOf(base: type[T]) -> type[T]:  # noqa: N802
+     """Useful function to make mixins with baseclass type hint
+
+     ```
+     class StorageMixin(MixinOf(DriverProtocol)): ...
+     ```
+     """
+     if TYPE_CHECKING:
+         return base
+     return type("<MixinOf>", (base,), {})
+
+
+ __all__ = (
+     "AIOSQL_INSTALLED",
+     "ATTRS_INSTALLED",
+     "CATTRS_INSTALLED",
+     "FSSPEC_INSTALLED",
+     "LITESTAR_INSTALLED",
+     "MSGSPEC_INSTALLED",
+     "OBSTORE_INSTALLED",
+     "OPENTELEMETRY_INSTALLED",
+     "PGVECTOR_INSTALLED",
+     "PROMETHEUS_INSTALLED",
+     "PYARROW_INSTALLED",
+     "PYDANTIC_INSTALLED",
+     "PYDANTIC_USE_FAILFAST",
+     "UNSET",
+     "AiosqlAsyncProtocol",
+     "AiosqlParamType",
+     "AiosqlProtocol",
+     "AiosqlSQLOperationType",
+     "AiosqlSyncProtocol",
+     "ArrowRecordBatch",
+     "ArrowTable",
+     "AttrsInstance",
+     "BaseModel",
+     "BulkModelDict",
+     "ConnectionT",
+     "Counter",
+     "DTOData",
+     "DataclassProtocol",
+     "DictLike",
+     "DictRow",
+     "Empty",
+     "EmptyEnum",
+     "EmptyType",
+     "FailFast",
+     "Gauge",
+     "Histogram",
+     "Mapping",
+     "MixinOf",
+     "ModelDTOT",
+     "ModelDict",
+     "ModelDict",
+     "ModelDictList",
+     "ModelDictList",
+     "ModelT",
+     "PoolT",
+     "PoolT_co",
+     "PydanticOrMsgspecT",
+     "RowT",
+     "Span",
+     "StatementParameters",
+     "Status",
+     "StatusCode",
+     "Struct",
+     "SupportedSchemaModel",
+     "Tracer",
+     "TupleRow",
+     "TypeAdapter",
+     "UnsetType",
+     "aiosql",
+     "attrs_asdict",
+     "attrs_define",
+     "attrs_field",
+     "attrs_fields",
+     "attrs_has",
+     "cattrs_structure",
+     "cattrs_unstructure",
+     "convert",
+     "get_type_adapter",
+     "trace",
+ )
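A short sketch of how the `get_type_adapter` helper defined above can be used. The `User` model is illustrative only, and the example assumes pydantic is installed since the helper returns a cached `pydantic.TypeAdapter`:

```python
from pydantic import BaseModel

from sqlspec.typing import get_type_adapter


class User(BaseModel):
    id: int
    name: str


# Adapters are memoized per type via lru_cache, so repeated calls are cheap.
adapter = get_type_adapter(User)
user = adapter.validate_python({"id": 1, "name": "Ada"})
assert get_type_adapter(User) is adapter
```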
sqlspec/utils/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from sqlspec.utils import deprecation, fixtures, module_loader, singleton, sync_tools, text, type_guards
+
+ __all__ = ("deprecation", "fixtures", "module_loader", "singleton", "sync_tools", "text", "type_guards")
sqlspec/utils/correlation.py ADDED
@@ -0,0 +1,150 @@
+ """Correlation ID tracking for distributed tracing.
+
+ This module provides utilities for tracking correlation IDs across
+ database operations, enabling distributed tracing and debugging.
+ """
+
+ from __future__ import annotations
+
+ import uuid
+ from contextlib import contextmanager
+ from contextvars import ContextVar
+ from typing import TYPE_CHECKING, Any
+
+ if TYPE_CHECKING:
+     from collections.abc import Generator, MutableMapping
+     from logging import LoggerAdapter
+
+ __all__ = ("CorrelationContext", "correlation_context", "get_correlation_adapter")
+
+
+ class CorrelationContext:
+     """Context manager for correlation ID tracking.
+
+     This class provides a context-aware way to track correlation IDs
+     across async and sync operations.
+     """
+
+     _correlation_id: ContextVar[str | None] = ContextVar("sqlspec_correlation_id", default=None)
+
+     @classmethod
+     def get(cls) -> str | None:
+         """Get the current correlation ID.
+
+         Returns:
+             The current correlation ID or None if not set
+         """
+         return cls._correlation_id.get()
+
+     @classmethod
+     def set(cls, correlation_id: str | None) -> None:
+         """Set the correlation ID.
+
+         Args:
+             correlation_id: The correlation ID to set
+         """
+         cls._correlation_id.set(correlation_id)
+
+     @classmethod
+     def generate(cls) -> str:
+         """Generate a new correlation ID.
+
+         Returns:
+             A new UUID-based correlation ID
+         """
+         return str(uuid.uuid4())
+
+     @classmethod
+     @contextmanager
+     def context(cls, correlation_id: str | None = None) -> Generator[str, None, None]:
+         """Context manager for correlation ID scope.
+
+         Args:
+             correlation_id: The correlation ID to use. If None, generates a new one.
+
+         Yields:
+             The correlation ID being used
+         """
+         if correlation_id is None:
+             correlation_id = cls.generate()
+
+         previous_id = cls.get()
+
+         try:
+             cls.set(correlation_id)
+             yield correlation_id
+         finally:
+             cls.set(previous_id)
+
+     @classmethod
+     def clear(cls) -> None:
+         """Clear the current correlation ID."""
+         cls.set(None)
+
+     @classmethod
+     def to_dict(cls) -> dict[str, Any]:
+         """Get correlation context as a dictionary.
+
+         Returns:
+             Dictionary with correlation_id key if set
+         """
+         correlation_id = cls.get()
+         return {"correlation_id": correlation_id} if correlation_id else {}
+
+
+ @contextmanager
+ def correlation_context(correlation_id: str | None = None) -> Generator[str, None, None]:
+     """Convenience context manager for correlation ID tracking.
+
+     Args:
+         correlation_id: Optional correlation ID. If None, generates a new one.
+
+     Yields:
+         The active correlation ID
+
+     Example:
+         ```python
+         with correlation_context() as correlation_id:
+             logger.info(
+                 "Processing request",
+                 extra={"correlation_id": correlation_id},
+             )
+         ```
+     """
+     with CorrelationContext.context(correlation_id) as cid:
+         yield cid
+
+
+ def get_correlation_adapter(logger: Any) -> LoggerAdapter:
+     """Get a logger adapter that automatically includes correlation ID.
+
+     Args:
+         logger: The base logger to wrap
+
+     Returns:
+         LoggerAdapter that includes correlation ID in all logs
+     """
+     from logging import LoggerAdapter
+
+     class CorrelationAdapter(LoggerAdapter):
+         """Logger adapter that adds correlation ID to all logs."""
+
+         def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, dict[str, Any]]:
+             """Add correlation ID to the log record.
+
+             Args:
+                 msg: The log message
+                 kwargs: Keyword arguments for the log record
+
+             Returns:
+                 The message and updated kwargs
+             """
+             extra = kwargs.get("extra", {})
+
+             if correlation_id := CorrelationContext.get():
+                 extra["correlation_id"] = correlation_id
+
+             kwargs["extra"] = extra
+             return msg, dict(kwargs)
+
+     return CorrelationAdapter(logger, {})
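To close out the correlation module, a minimal sketch of wiring it into standard logging; the logger name and messages are illustrative only:

```python
import logging

from sqlspec.utils.correlation import correlation_context, get_correlation_adapter

logging.basicConfig(level=logging.INFO)
log = get_correlation_adapter(logging.getLogger("app"))

# Every record emitted inside the block carries the same correlation_id in `extra`,
# and the previous ID (if any) is restored when the block exits.
with correlation_context() as correlation_id:
    log.info("starting batch import")
    log.info("finished batch import")
```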