sqlspec 0.20.0__py3-none-any.whl → 0.21.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sqlspec/driver/mixins/_result_tools.py +41 -6
- sqlspec/extensions/litestar/config.py +186 -2
- sqlspec/extensions/litestar/plugin.py +237 -3
- sqlspec/utils/data_transformation.py +120 -0
- sqlspec/utils/text.py +27 -19
- sqlspec/utils/type_guards.py +74 -0
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/METADATA +230 -44
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/RECORD +12 -11
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/WHEEL +0 -0
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/licenses/LICENSE +0 -0
- {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/licenses/NOTICE +0 -0
sqlspec/driver/mixins/_result_tools.py
CHANGED

@@ -1,3 +1,4 @@
+# ruff: noqa: C901
 """Result handling and schema conversion mixins for database drivers."""
 
 import datetime
@@ -22,7 +23,16 @@ from sqlspec.typing import (
     convert,
     get_type_adapter,
 )
-from sqlspec.utils.
+from sqlspec.utils.data_transformation import transform_dict_keys
+from sqlspec.utils.text import camelize, kebabize, pascalize
+from sqlspec.utils.type_guards import (
+    get_msgspec_rename_config,
+    is_attrs_schema,
+    is_dataclass,
+    is_dict,
+    is_msgspec_struct,
+    is_pydantic_model,
+)
 
 __all__ = ("_DEFAULT_TYPE_DECODERS", "_default_msgspec_deserializer")
 
@@ -143,21 +153,46 @@ class ToSchemaMixin:
            if isinstance(data, list):
                result: list[Any] = []
                for item in data:
-                   if
+                   if is_dict(item):
                        result.append(schema_type(**dict(item)))  # type: ignore[operator]
                    else:
                        result.append(item)
                return result
-           if
+           if is_dict(data):
                return schema_type(**dict(data))  # type: ignore[operator]
            if isinstance(data, dict):
                return schema_type(**data)  # type: ignore[operator]
            return data
        if is_msgspec_struct(schema_type):
+           rename_config = get_msgspec_rename_config(schema_type)  # type: ignore[arg-type]
            deserializer = partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS)
-
-
-
+
+           # Transform field names if rename configuration exists
+           transformed_data = data
+           if (rename_config and is_dict(data)) or (isinstance(data, Sequence) and data and is_dict(data[0])):
+               try:
+                   converter = None
+                   if rename_config == "camel":
+                       converter = camelize
+                   elif rename_config == "kebab":
+                       converter = kebabize
+                   elif rename_config == "pascal":
+                       converter = pascalize
+
+                   if converter is not None:
+                       if isinstance(data, Sequence):
+                           transformed_data = [
+                               transform_dict_keys(item, converter) if is_dict(item) else item for item in data
+                           ]
+                       else:
+                           transformed_data = transform_dict_keys(data, converter) if is_dict(data) else data
+               except Exception as e:
+                   logger.debug("Field name transformation failed for msgspec schema: %s", e)
+                   transformed_data = data
+
+           if not isinstance(transformed_data, Sequence):
+               return convert(obj=transformed_data, type=schema_type, from_attributes=True, dec_hook=deserializer)
+           return convert(obj=transformed_data, type=list[schema_type], from_attributes=True, dec_hook=deserializer)  # type: ignore[valid-type]
        if is_pydantic_model(schema_type):
            if not isinstance(data, Sequence):
                adapter = get_type_adapter(schema_type)
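In practical terms, the new branch means rows keyed by snake_case column names can now be converted into msgspec structs that declare a `rename` policy. A small sketch of that pipeline using the helpers introduced in this release (the `User` struct and the row literal are illustrative, not code from the package):

```python
import msgspec

from sqlspec.utils.data_transformation import transform_dict_keys
from sqlspec.utils.text import camelize
from sqlspec.utils.type_guards import get_msgspec_rename_config


class User(msgspec.Struct, rename="camel"):
    user_id: int
    created_at: str


row = {"user_id": 1, "created_at": "2024-01-01"}  # typical snake_case database row

# The mixin first infers the struct's rename policy from its fields...
assert get_msgspec_rename_config(User) == "camel"

# ...then rewrites the row's keys so msgspec sees the encoded names it expects.
user = msgspec.convert(transform_dict_keys(row, camelize), type=User)
print(user)  # User(user_id=1, created_at='2024-01-01')
```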
sqlspec/extensions/litestar/config.py
CHANGED

@@ -1,7 +1,8 @@
 from dataclasses import dataclass, field
-from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union
+from typing import TYPE_CHECKING, Any, Callable, Literal, Optional, Union, cast
 
 from sqlspec.exceptions import ImproperConfigurationError
+from sqlspec.extensions.litestar._utils import get_sqlspec_scope_state, set_sqlspec_scope_state
 from sqlspec.extensions.litestar.handlers import (
     autocommit_handler_maker,
     connection_provider_maker,
@@ -13,13 +14,14 @@ from sqlspec.extensions.litestar.handlers import (
 
 if TYPE_CHECKING:
     from collections.abc import AsyncGenerator, Awaitable
-    from contextlib import AbstractAsyncContextManager
+    from contextlib import AbstractAsyncContextManager, AbstractContextManager
 
     from litestar import Litestar
     from litestar.datastructures.state import State
     from litestar.types import BeforeMessageSendHookHandler, Scope
 
     from sqlspec.config import AsyncConfigT, DriverT, SyncConfigT
+    from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase
     from sqlspec.typing import ConnectionT, PoolT
 
 
@@ -34,8 +36,10 @@ __all__ = (
     "DEFAULT_CONNECTION_KEY",
     "DEFAULT_POOL_KEY",
     "DEFAULT_SESSION_KEY",
+    "AsyncDatabaseConfig",
     "CommitMode",
     "DatabaseConfig",
+    "SyncDatabaseConfig",
 )
 
 
@@ -90,3 +94,183 @@ class DatabaseConfig:
         self.session_provider = session_provider_maker(
             config=self.config, connection_dependency_key=self.connection_key
         )
+
+    def get_request_session(
+        self, state: "State", scope: "Scope"
+    ) -> "Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]":
+        """Get a session instance from the current request.
+
+        This method provides access to the database session that has been added to the request
+        scope, similar to Advanced Alchemy's provide_session method. It first looks for an
+        existing session in the request scope state, and if not found, creates a new one using
+        the connection from the scope.
+
+        Args:
+            state: The Litestar application State object.
+            scope: The ASGI scope containing the request context.
+
+        Returns:
+            A driver session instance.
+
+        Raises:
+            ImproperConfigurationError: If no connection is available in the scope.
+        """
+        # Create a unique scope key for sessions to avoid conflicts
+        session_scope_key = f"{self.session_key}_instance"
+
+        # Try to get existing session from scope
+        session = get_sqlspec_scope_state(scope, session_scope_key)
+        if session is not None:
+            return cast("Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]", session)
+
+        # Get connection from scope state
+        connection = get_sqlspec_scope_state(scope, self.connection_key)
+        if connection is None:
+            msg = f"No database connection found in scope for key '{self.connection_key}'. "
+            msg += "Ensure the connection dependency is properly configured and available."
+            raise ImproperConfigurationError(detail=msg)
+
+        # Create new session using the connection
+        # Access driver_type which is available on all config types
+        session = self.config.driver_type(connection=connection)  # type: ignore[union-attr]
+
+        # Store session in scope for future use
+        set_sqlspec_scope_state(scope, session_scope_key, session)
+
+        return cast("Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]", session)
+
+    def get_request_connection(self, state: "State", scope: "Scope") -> "Any":
+        """Get a connection instance from the current request.
+
+        This method provides access to the database connection that has been added to the request
+        scope. This is useful in guards, middleware, or other contexts where you need direct
+        access to the connection that's been established for the current request.
+
+        Args:
+            state: The Litestar application State object.
+            scope: The ASGI scope containing the request context.
+
+        Returns:
+            A database connection instance.
+
+        Raises:
+            ImproperConfigurationError: If no connection is available in the scope.
+        """
+        connection = get_sqlspec_scope_state(scope, self.connection_key)
+        if connection is None:
+            msg = f"No database connection found in scope for key '{self.connection_key}'. "
+            msg += "Ensure the connection dependency is properly configured and available."
+            raise ImproperConfigurationError(detail=msg)
+
+        return cast("Any", connection)
+
+
+# Add passthrough methods to both specialized classes for convenience
+class SyncDatabaseConfig(DatabaseConfig):
+    """Sync-specific DatabaseConfig with better typing for get_request_session."""
+
+    def get_request_session(self, state: "State", scope: "Scope") -> "SyncDriverAdapterBase":
+        """Get a sync session instance from the current request.
+
+        This method provides access to the database session that has been added to the request
+        scope, similar to Advanced Alchemy's provide_session method. It first looks for an
+        existing session in the request scope state, and if not found, creates a new one using
+        the connection from the scope.
+
+        Args:
+            state: The Litestar application State object.
+            scope: The ASGI scope containing the request context.
+
+        Returns:
+            A sync driver session instance.
+        """
+        session = super().get_request_session(state, scope)
+        return cast("SyncDriverAdapterBase", session)
+
+    def provide_session(self) -> "AbstractContextManager[SyncDriverAdapterBase]":
+        """Provide a database session context manager.
+
+        This is a passthrough to the underlying config's provide_session method
+        for convenient access to database sessions.
+
+        Returns:
+            Context manager that yields a sync driver session.
+        """
+        return self.config.provide_session()  # type: ignore[union-attr,no-any-return]
+
+    def provide_connection(self) -> "AbstractContextManager[Any]":
+        """Provide a database connection context manager.
+
+        This is a passthrough to the underlying config's provide_connection method
+        for convenient access to database connections.
+
+        Returns:
+            Context manager that yields a sync database connection.
+        """
+        return self.config.provide_connection()  # type: ignore[union-attr,no-any-return]
+
+    def create_connection(self) -> "Any":
+        """Create and return a new database connection.
+
+        This is a passthrough to the underlying config's create_connection method
+        for direct connection creation without context management.
+
+        Returns:
+            A new sync database connection.
+        """
+        return self.config.create_connection()  # type: ignore[union-attr]
+
+
+class AsyncDatabaseConfig(DatabaseConfig):
+    """Async-specific DatabaseConfig with better typing for get_request_session."""
+
+    def get_request_session(self, state: "State", scope: "Scope") -> "AsyncDriverAdapterBase":
+        """Get an async session instance from the current request.
+
+        This method provides access to the database session that has been added to the request
+        scope, similar to Advanced Alchemy's provide_session method. It first looks for an
+        existing session in the request scope state, and if not found, creates a new one using
+        the connection from the scope.
+
+        Args:
+            state: The Litestar application State object.
+            scope: The ASGI scope containing the request context.
+
+        Returns:
+            An async driver session instance.
+        """
+        session = super().get_request_session(state, scope)
+        return cast("AsyncDriverAdapterBase", session)
+
+    def provide_session(self) -> "AbstractAsyncContextManager[AsyncDriverAdapterBase]":
+        """Provide a database session context manager.
+
+        This is a passthrough to the underlying config's provide_session method
+        for convenient access to database sessions.
+
+        Returns:
+            Context manager that yields an async driver session.
+        """
+        return self.config.provide_session()  # type: ignore[union-attr,no-any-return]
+
+    def provide_connection(self) -> "AbstractAsyncContextManager[Any]":
+        """Provide a database connection context manager.
+
+        This is a passthrough to the underlying config's provide_connection method
+        for convenient access to database connections.
+
+        Returns:
+            Context manager that yields an async database connection.
+        """
+        return self.config.provide_connection()  # type: ignore[union-attr,no-any-return]
+
+    async def create_connection(self) -> "Any":
+        """Create and return a new database connection.
+
+        This is a passthrough to the underlying config's create_connection method
+        for direct connection creation without context management.
+
+        Returns:
+            A new async database connection.
+        """
+        return await self.config.create_connection()  # type: ignore[union-attr]
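For code that runs outside a request, the typed subclasses mainly buy narrower return types on the passthroughs. A minimal sketch, assuming `AsyncDatabaseConfig` accepts the same `config=` keyword as `DatabaseConfig` does in the README example, that the adapter import path follows the README's naming, and that the async driver's `execute` is awaitable:

```python
from sqlspec.adapters.aiosqlite import AiosqliteConfig
from sqlspec.extensions.litestar.config import AsyncDatabaseConfig


async def warm_up() -> None:
    # Wrap an async adapter config; provide_session() passes straight through to it
    # but is annotated to yield an AsyncDriverAdapterBase, so editors see the async API.
    db = AsyncDatabaseConfig(config=AiosqliteConfig(pool_config={"database": ":memory:"}))
    async with db.provide_session() as session:
        await session.execute("SELECT 1")
```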
sqlspec/extensions/litestar/plugin.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING, Any, Optional, Union, cast, overload
 
 from litestar.di import Provide
 from litestar.plugins import CLIPlugin, InitPluginProtocol
@@ -6,14 +6,17 @@ from litestar.plugins import CLIPlugin, InitPluginProtocol
 from sqlspec.base import SQLSpec as SQLSpecBase
 from sqlspec.config import AsyncConfigT, DatabaseConfigProtocol, DriverT, SyncConfigT
 from sqlspec.exceptions import ImproperConfigurationError
-from sqlspec.extensions.litestar.config import DatabaseConfig
+from sqlspec.extensions.litestar.config import AsyncDatabaseConfig, DatabaseConfig, SyncDatabaseConfig
 from sqlspec.typing import ConnectionT, PoolT
 from sqlspec.utils.logging import get_logger
 
 if TYPE_CHECKING:
     from click import Group
     from litestar.config.app import AppConfig
+    from litestar.datastructures.state import State
+    from litestar.types import Scope
 
+    from sqlspec.driver import AsyncDriverAdapterBase, SyncDriverAdapterBase
     from sqlspec.loader import SQLFileLoader
 
 logger = get_logger("extensions.litestar")
@@ -131,11 +134,242 @@ class SQLSpec(SQLSpecBase, InitPluginProtocol, CLIPlugin):
             The annotation for the configuration.
         """
         for c in self.config:
-
+            # Check annotation only if it's been set (during on_app_init)
+            annotation_match = hasattr(c, "annotation") and key == c.annotation
+            if key == c.config or annotation_match or key in {c.connection_key, c.pool_key}:
+                if not hasattr(c, "annotation"):
+                    msg = (
+                        "Annotation not set for configuration. Ensure the plugin has been initialized with on_app_init."
+                    )
+                    raise AttributeError(msg)
                 return c.annotation
         msg = f"No configuration found for {key}"
         raise KeyError(msg)
 
+    @overload
+    def get_config(self, name: "type[SyncConfigT]") -> "SyncConfigT": ...
+
+    @overload
+    def get_config(self, name: "type[AsyncConfigT]") -> "AsyncConfigT": ...
+
+    @overload
+    def get_config(self, name: str) -> "DatabaseConfig": ...
+
+    @overload
+    def get_config(self, name: "type[SyncDatabaseConfig]") -> "SyncDatabaseConfig": ...
+
+    @overload
+    def get_config(self, name: "type[AsyncDatabaseConfig]") -> "AsyncDatabaseConfig": ...
+
+    def get_config(
+        self, name: "Union[type[DatabaseConfigProtocol[ConnectionT, PoolT, DriverT]], str, Any]"
+    ) -> "Union[DatabaseConfigProtocol[ConnectionT, PoolT, DriverT], DatabaseConfig, SyncDatabaseConfig, AsyncDatabaseConfig]":
+        """Get a configuration instance by name, supporting both base behavior and Litestar extensions.
+
+        This method extends the base get_config to support Litestar-specific lookup patterns
+        while maintaining compatibility with the base class signature. It supports lookup by
+        connection key, pool key, session key, config instance, or annotation type.
+
+        Args:
+            name: The configuration identifier - can be:
+                - Type annotation (base class behavior)
+                - connection_key (e.g., "auth_db_connection")
+                - pool_key (e.g., "analytics_db_pool")
+                - session_key (e.g., "reporting_db_session")
+                - config instance
+                - annotation type
+
+        Raises:
+            KeyError: If no configuration is found for the given name.
+
+        Returns:
+            The configuration instance for the specified name.
+        """
+        # First try base class behavior for type-based lookup
+        # Only call super() if name matches the expected base class types
+        if not isinstance(name, str):
+            try:
+                return super().get_config(name)  # type: ignore[no-any-return]
+            except (KeyError, AttributeError):
+                # Fall back to Litestar-specific lookup patterns
+                pass
+
+        # Litestar-specific lookups by string keys
+        if isinstance(name, str):
+            for c in self.config:
+                if name in {c.connection_key, c.pool_key, c.session_key}:
+                    return c  # Return the DatabaseConfig wrapper for string lookups
+
+        # Lookup by config instance or annotation
+        for c in self.config:
+            annotation_match = hasattr(c, "annotation") and name == c.annotation
+            if name == c.config or annotation_match:
+                return c.config  # Return the underlying config for type-based lookups
+
+        msg = f"No database configuration found for name '{name}'. Available keys: {self._get_available_keys()}"
+        raise KeyError(msg)
+
+    def provide_request_session(
+        self,
+        key: "Union[str, SyncConfigT, AsyncConfigT, type[Union[SyncConfigT, AsyncConfigT]]]",
+        state: "State",
+        scope: "Scope",
+    ) -> "Union[SyncDriverAdapterBase, AsyncDriverAdapterBase]":
+        """Provide a database session for the specified configuration key from request scope.
+
+        This is a convenience method that combines get_config and get_request_session
+        into a single call, similar to Advanced Alchemy's provide_session pattern.
+
+        Args:
+            key: The configuration identifier (same as get_config)
+            state: The Litestar application State object
+            scope: The ASGI scope containing the request context
+
+        Returns:
+            A driver session instance for the specified database configuration
+
+        Example:
+            >>> sqlspec_plugin = connection.app.state.sqlspec
+            >>> # Direct session access by key
+            >>> auth_session = sqlspec_plugin.provide_request_session(
+            ...     "auth_db", state, scope
+            ... )
+            >>> analytics_session = sqlspec_plugin.provide_request_session(
+            ...     "analytics_db", state, scope
+            ... )
+        """
+        # Get DatabaseConfig wrapper for Litestar methods
+        db_config = self._get_database_config(key)
+        return db_config.get_request_session(state, scope)
+
+    def provide_sync_request_session(
+        self, key: "Union[str, SyncConfigT, type[SyncConfigT]]", state: "State", scope: "Scope"
+    ) -> "SyncDriverAdapterBase":
+        """Provide a sync database session for the specified configuration key from request scope.
+
+        This method provides better type hints for sync database sessions, ensuring the returned
+        session is properly typed as SyncDriverAdapterBase for better IDE support and type safety.
+
+        Args:
+            key: The sync configuration identifier
+            state: The Litestar application State object
+            scope: The ASGI scope containing the request context
+
+        Returns:
+            A sync driver session instance for the specified database configuration
+
+        Example:
+            >>> sqlspec_plugin = connection.app.state.sqlspec
+            >>> auth_session = sqlspec_plugin.provide_sync_request_session(
+            ...     "auth_db", state, scope
+            ... )
+            >>> # auth_session is now correctly typed as SyncDriverAdapterBase
+        """
+        # Get DatabaseConfig wrapper for Litestar methods
+        db_config = self._get_database_config(key)
+        session = db_config.get_request_session(state, scope)
+        return cast("SyncDriverAdapterBase", session)
+
+    def provide_async_request_session(
+        self, key: "Union[str, AsyncConfigT, type[AsyncConfigT]]", state: "State", scope: "Scope"
+    ) -> "AsyncDriverAdapterBase":
+        """Provide an async database session for the specified configuration key from request scope.
+
+        This method provides better type hints for async database sessions, ensuring the returned
+        session is properly typed as AsyncDriverAdapterBase for better IDE support and type safety.
+
+        Args:
+            key: The async configuration identifier
+            state: The Litestar application State object
+            scope: The ASGI scope containing the request context
+
+        Returns:
+            An async driver session instance for the specified database configuration
+
+        Example:
+            >>> sqlspec_plugin = connection.app.state.sqlspec
+            >>> auth_session = sqlspec_plugin.provide_async_request_session(
+            ...     "auth_db", state, scope
+            ... )
+            >>> # auth_session is now correctly typed as AsyncDriverAdapterBase
+        """
+        # Get DatabaseConfig wrapper for Litestar methods
+        db_config = self._get_database_config(key)
+        session = db_config.get_request_session(state, scope)
+        return cast("AsyncDriverAdapterBase", session)
+
+    def provide_request_connection(
+        self,
+        key: "Union[str, SyncConfigT, AsyncConfigT, type[Union[SyncConfigT, AsyncConfigT]]]",
+        state: "State",
+        scope: "Scope",
+    ) -> Any:
+        """Provide a database connection for the specified configuration key from request scope.
+
+        This is a convenience method that combines get_config and get_request_connection
+        into a single call.
+
+        Args:
+            key: The configuration identifier (same as get_config)
+            state: The Litestar application State object
+            scope: The ASGI scope containing the request context
+
+        Returns:
+            A database connection instance for the specified database configuration
+
+        Example:
+            >>> sqlspec_plugin = connection.app.state.sqlspec
+            >>> # Direct connection access by key
+            >>> auth_conn = sqlspec_plugin.provide_request_connection(
+            ...     "auth_db", state, scope
+            ... )
+            >>> analytics_conn = sqlspec_plugin.provide_request_connection(
+            ...     "analytics_db", state, scope
+            ... )
+        """
+        # Get DatabaseConfig wrapper for Litestar methods
+        db_config = self._get_database_config(key)
+        return db_config.get_request_connection(state, scope)
+
+    def _get_database_config(
+        self, key: "Union[str, SyncConfigT, AsyncConfigT, type[Union[SyncConfigT, AsyncConfigT]]]"
+    ) -> DatabaseConfig:
+        """Get a DatabaseConfig wrapper instance by name.
+
+        This is used internally by provide_request_session and provide_request_connection
+        to get the DatabaseConfig wrapper that has the request session methods.
+
+        Args:
+            key: The configuration identifier
+
+        Returns:
+            The DatabaseConfig wrapper instance
+
+        Raises:
+            KeyError: If no configuration is found for the given key
+        """
+        # For string keys, lookup by connection/pool/session keys
+        if isinstance(key, str):
+            for c in self.config:
+                if key in {c.connection_key, c.pool_key, c.session_key}:
+                    return c
+
+        # For other keys, lookup by config instance or annotation
+        for c in self.config:
+            annotation_match = hasattr(c, "annotation") and key == c.annotation
+            if key == c.config or annotation_match:
+                return c
+
+        msg = f"No database configuration found for name '{key}'. Available keys: {self._get_available_keys()}"
+        raise KeyError(msg)
+
+    def _get_available_keys(self) -> "list[str]":
+        """Get a list of all available configuration keys for error messages."""
+        keys = []
+        for c in self.config:
+            keys.extend([c.connection_key, c.pool_key, c.session_key])
+        return keys
+
     def _validate_dependency_keys(self) -> None:
         """Validate that connection and pool keys are unique across configurations.
 
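The docstring examples above assume you already have something with access to the ASGI connection, such as a Litestar guard. A hedged sketch of that wiring — the plugin being reachable at `app.state.sqlspec` follows the docstrings above, while the `"auth_db_session"` key, the flag query, and the awaitable `select_one_or_none` call are illustrative assumptions:

```python
from litestar.connection import ASGIConnection
from litestar.exceptions import NotAuthorizedException
from litestar.handlers.base import BaseRouteHandler


async def require_beta_flag(connection: ASGIConnection, _: BaseRouteHandler) -> None:
    # The docstrings above retrieve the plugin from application state as `state.sqlspec`.
    sqlspec_plugin = connection.app.state.sqlspec
    # Resolve the request-scoped session for one configured database by its session key.
    session = sqlspec_plugin.provide_async_request_session(
        "auth_db_session", connection.app.state, connection.scope
    )
    row = await session.select_one_or_none(
        "SELECT 1 AS ok FROM feature_flags WHERE name = ?", "beta"
    )
    if row is None:
        raise NotAuthorizedException()
```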
sqlspec/utils/data_transformation.py
ADDED

@@ -0,0 +1,120 @@
+"""Data transformation utilities for SQLSpec.
+
+Provides functions for transforming data structures, particularly for
+field name conversion when mapping database results to schema objects.
+Used primarily for msgspec field name conversion with rename configurations.
+"""
+
+from typing import Any, Callable, Union
+
+__all__ = ("transform_dict_keys",)
+
+
+def _safe_convert_key(key: Any, converter: Callable[[str], str]) -> Any:
+    """Safely convert a key using the converter function.
+
+    Args:
+        key: Key to convert (may not be a string).
+        converter: Function to convert string keys.
+
+    Returns:
+        Converted key if conversion succeeds, original key otherwise.
+    """
+    if not isinstance(key, str):
+        return key
+
+    try:
+        return converter(key)
+    except (TypeError, ValueError, AttributeError):
+        # If conversion fails, return the original key
+        return key
+
+
+def transform_dict_keys(data: Union[dict, list, Any], converter: Callable[[str], str]) -> Union[dict, list, Any]:
+    """Transform dictionary keys using the provided converter function.
+
+    Recursively transforms all dictionary keys in a data structure using
+    the provided converter function. Handles nested dictionaries, lists
+    of dictionaries, and preserves non-dict values unchanged.
+
+    Args:
+        data: The data structure to transform. Can be a dict, list, or any other type.
+        converter: Function to convert string keys (e.g., camelize, kebabize).
+
+    Returns:
+        The transformed data structure with converted keys. Non-dict values
+        are returned unchanged.
+
+    Examples:
+        Transform snake_case keys to camelCase:
+
+        >>> from sqlspec.utils.text import camelize
+        >>> data = {"user_id": 123, "created_at": "2024-01-01"}
+        >>> transform_dict_keys(data, camelize)
+        {"userId": 123, "createdAt": "2024-01-01"}
+
+        Transform nested structures:
+
+        >>> nested = {
+        ...     "user_data": {"first_name": "John", "last_name": "Doe"},
+        ...     "order_items": [
+        ...         {"item_id": 1, "item_name": "Product A"},
+        ...         {"item_id": 2, "item_name": "Product B"},
+        ...     ],
+        ... }
+        >>> transform_dict_keys(nested, camelize)
+        {
+            "userData": {
+                "firstName": "John",
+                "lastName": "Doe"
+            },
+            "orderItems": [
+                {"itemId": 1, "itemName": "Product A"},
+                {"itemId": 2, "itemName": "Product B"}
+            ]
+        }
+    """
+    if isinstance(data, dict):
+        return _transform_dict(data, converter)
+    if isinstance(data, list):
+        return _transform_list(data, converter)
+    return data
+
+
+def _transform_dict(data: dict, converter: Callable[[str], str]) -> dict:
+    """Transform a dictionary's keys recursively.
+
+    Args:
+        data: Dictionary to transform.
+        converter: Function to convert string keys.
+
+    Returns:
+        Dictionary with transformed keys and recursively transformed values.
+    """
+    transformed = {}
+
+    for key, value in data.items():
+        # Convert the key using the provided converter function
+        # Use safe conversion that handles edge cases without try-except
+        converted_key = _safe_convert_key(key, converter)
+
+        # Recursively transform the value
+        transformed_value = transform_dict_keys(value, converter)
+
+        transformed[converted_key] = transformed_value
+
+    return transformed
+
+
+def _transform_list(data: list, converter: Callable[[str], str]) -> list:
+    """Transform a list's elements recursively.
+
+    Args:
+        data: List to transform.
+        converter: Function to convert string keys in nested structures.
+
+    Returns:
+        List with recursively transformed elements.
+    """
+    # Use list comprehension for better performance and avoid try-except in loop
+    return [transform_dict_keys(item, converter) for item in data]
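One detail worth calling out from the helpers above: non-string keys, and any converter failure, fall back to the original key, so payloads with mixed key types survive the pass untouched while nested dicts and lists are walked recursively. For example:

```python
from sqlspec.utils.data_transformation import transform_dict_keys
from sqlspec.utils.text import kebabize

payload = {
    ("composite", "key"): 1,  # non-string keys are returned unchanged
    "outer_field": {"inner_field": [{"leaf_node": True}]},
}
print(transform_dict_keys(payload, kebabize))
# {('composite', 'key'): 1, 'outer-field': {'inner-field': [{'leaf-node': True}]}}
```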
sqlspec/utils/text.py
CHANGED

@@ -19,25 +19,7 @@ _SNAKE_CASE_HYPHEN_SPACE = re.compile(r"[.\s@-]+", re.UNICODE)
 _SNAKE_CASE_REMOVE_NON_WORD = re.compile(r"[^\w]+", re.UNICODE)
 _SNAKE_CASE_MULTIPLE_UNDERSCORES = re.compile(r"__+", re.UNICODE)
 
-__all__ = ("camelize", "
-
-
-def check_email(email: str) -> str:
-    """Validate an email address.
-
-    Args:
-        email: The email to validate.
-
-    Raises:
-        ValueError: If the email is invalid.
-
-    Returns:
-        The validated email.
-    """
-    if "@" not in email:
-        msg = "Invalid email!"
-        raise ValueError(msg)
-    return email.lower()
+__all__ = ("camelize", "kebabize", "pascalize", "slugify", "snake_case")
 
 
 def slugify(value: str, allow_unicode: bool = False, separator: Optional[str] = None) -> str:
@@ -80,6 +62,32 @@ def camelize(string: str) -> str:
     return "".join(word if index == 0 else word.capitalize() for index, word in enumerate(string.split("_")))
 
 
+@lru_cache(maxsize=100)
+def kebabize(string: str) -> str:
+    """Convert a string to kebab-case.
+
+    Args:
+        string: The string to convert.
+
+    Returns:
+        The kebab-case version of the string.
+    """
+    return "-".join(word.lower() for word in string.split("_") if word)
+
+
+@lru_cache(maxsize=100)
+def pascalize(string: str) -> str:
+    """Convert a string to PascalCase.
+
+    Args:
+        string: The string to convert.
+
+    Returns:
+        The PascalCase version of the string.
+    """
+    return "".join(word.capitalize() for word in string.split("_") if word)
+
+
 @lru_cache(maxsize=100)
 def snake_case(string: str) -> str:
     """Convert a string to snake_case.
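For reference, the expected outputs of the three casing helpers on the same input, per the implementations above:

```python
from sqlspec.utils.text import camelize, kebabize, pascalize

print(camelize("order_item_id"))   # orderItemId
print(kebabize("order_item_id"))   # order-item-id
print(pascalize("order_item_id"))  # OrderItemId
```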
sqlspec/utils/type_guards.py
CHANGED

@@ -6,6 +6,7 @@ understand type narrowing, replacing defensive hasattr() and duck typing pattern
 
 from collections.abc import Sequence
 from collections.abc import Set as AbstractSet
+from functools import lru_cache
 from typing import TYPE_CHECKING, Any, Optional, Union, cast
 
 from sqlspec.typing import (
@@ -59,6 +60,7 @@
     "extract_dataclass_items",
     "get_initial_expression",
     "get_literal_parent",
+    "get_msgspec_rename_config",
     "get_node_expressions",
     "get_node_this",
     "get_param_style_and_name",
@@ -429,6 +431,78 @@ def is_msgspec_struct_without_field(obj: Any, field_name: str) -> "TypeGuard[Str
     return False
 
 
+@lru_cache(maxsize=500)
+def _detect_rename_pattern(field_name: str, encode_name: str) -> "Optional[str]":
+    """Detect the rename pattern by comparing field name transformations.
+
+    Args:
+        field_name: Original field name (e.g., "user_id")
+        encode_name: Encoded field name (e.g., "userId")
+
+    Returns:
+        The detected rename pattern ("camel", "kebab", "pascal") or None
+    """
+    from sqlspec.utils.text import camelize, kebabize, pascalize
+
+    # Test camelCase conversion
+    if encode_name == camelize(field_name) and encode_name != field_name:
+        return "camel"
+
+    if encode_name == kebabize(field_name) and encode_name != field_name:
+        return "kebab"
+
+    if encode_name == pascalize(field_name) and encode_name != field_name:
+        return "pascal"
+    return None
+
+
+def get_msgspec_rename_config(schema_type: type) -> "Optional[str]":
+    """Extract msgspec rename configuration from a struct type.
+
+    Analyzes field name transformations to detect the rename pattern used by msgspec.
+    Since msgspec doesn't store the original rename parameter directly, we infer it
+    by comparing field names with their encode_name values.
+
+    Args:
+        schema_type: The msgspec struct type to inspect.
+
+    Returns:
+        The rename configuration value ("camel", "kebab", "pascal", etc.) if detected,
+        None if no rename configuration exists or if not a msgspec struct.
+
+    Examples:
+        >>> class User(msgspec.Struct, rename="camel"):
+        ...     user_id: int
+        >>> get_msgspec_rename_config(User)
+        "camel"
+
+        >>> class Product(msgspec.Struct):
+        ...     product_id: int
+        >>> get_msgspec_rename_config(Product)
+        None
+    """
+    if not MSGSPEC_INSTALLED:
+        return None
+
+    if not is_msgspec_struct(schema_type):
+        return None
+
+    from msgspec import structs
+
+    fields = structs.fields(schema_type)  # type: ignore[arg-type]
+    if not fields:
+        return None
+
+    # Check if any field name differs from its encode_name
+    for field in fields:
+        if field.name != field.encode_name:
+            # Detect the rename pattern by comparing transformations
+            return _detect_rename_pattern(field.name, field.encode_name)
+
+    # If all field names match their encode_name, no rename is applied
+    return None
+
+
 def is_attrs_instance(obj: Any) -> "TypeGuard[AttrsInstanceStub]":
     """Check if a value is an attrs class instance.
 
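Because detection works by re-deriving `encode_name` with the three casing helpers, only the camel, kebab, and pascal policies are recognized; any other msgspec rename setting falls back to None. A quick sketch of that limit:

```python
import msgspec

from sqlspec.utils.type_guards import get_msgspec_rename_config


class KebabUser(msgspec.Struct, rename="kebab"):
    user_id: int


class UpperUser(msgspec.Struct, rename="upper"):
    user_id: int


print(get_msgspec_rename_config(KebabUser))  # "kebab"
# rename="upper" encodes user_id as USER_ID, which none of the helpers reproduce,
# so detection returns None for this struct.
print(get_msgspec_rename_config(UpperUser))  # None
```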
{sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sqlspec
-Version: 0.20.0
+Version: 0.21.1
 Summary: SQL Experiments in Python
 Project-URL: Discord, https://discord.gg/litestar
 Project-URL: Issue, https://github.com/litestar-org/sqlspec/issues/
@@ -130,16 +130,21 @@ These are just a few examples that demonstrate SQLSpec's flexibility. Each of th
 ```python
 from sqlspec import SQLSpec
 from sqlspec.adapters.sqlite import SqliteConfig
-
+
 # Create SQLSpec instance and configure database
-
-config =
+db_manager = SQLSpec()
+config = SqliteConfig(pool_config={"database": ":memory:"})  # Thread local pooling
+db_manager.add_config(config)
 
 # Execute queries with automatic result mapping
-with
+with db_manager.provide_session(config) as session:
     # Simple query
     result = session.execute("SELECT 'Hello, SQLSpec!' as message")
     print(result.get_first())  # {'message': 'Hello, SQLSpec!'}
+
+    # Type-safe single row query
+    row = session.select_one("SELECT 'Hello, SQLSpec!' as message")
+    print(row)  # {'message': 'Hello, SQLSpec!'}
 ```
 
 ### SQL Builder Example (Experimental)
@@ -150,30 +155,94 @@ with sql.provide_session(config) as session:
 from sqlspec import sql
 
 # Build a simple query
-query = sql.select("id", "name", "email").from_("users").where("active = ?"
-
+query = sql.select("id", "name", "email").from_("users").where("active = ?")
+statement = query.to_statement()
+print(statement.sql)  # SELECT id, name, email FROM users WHERE active = ?
 
 # More complex example with joins
 query = (
     sql.select("u.name", "COUNT(o.id) as order_count")
     .from_("users u")
     .left_join("orders o", "u.id = o.user_id")
-    .where("u.created_at > ?"
+    .where("u.created_at > ?")
     .group_by("u.name")
-    .having("COUNT(o.id) > ?"
+    .having("COUNT(o.id) > ?")
     .order_by("order_count", desc=True)
 )
 
-# Execute the built query
-with
-    results = session.execute(query
+# Execute the built query with parameters
+with db_manager.provide_session(config) as session:
+    results = session.execute(query, "2024-01-01", 5)
+```
+
+### Type-Safe Result Mapping
+
+SQLSpec supports automatic mapping to typed models using popular libraries:
+
+```python
+from sqlspec import SQLSpec
+from sqlspec.adapters.sqlite import SqliteConfig
+from pydantic import BaseModel
+
+class User(BaseModel):
+    id: int
+    name: str
+    email: str
+
+db_manager = SQLSpec()
+config = SqliteConfig(pool_config={"database": ":memory:"})
+db_manager.add_config(config)
+
+with db_manager.provide_session(config) as session:
+    # Create and populate test data
+    session.execute_script("""
+        CREATE TABLE users (id INTEGER, name TEXT, email TEXT);
+        INSERT INTO users VALUES (1, 'Alice', 'alice@example.com');
+    """)
+    # Map single result to typed model
+    user = session.select_one("SELECT * FROM users WHERE id = ?", 1, schema_type=User)
+    print(f"User: {user.name} ({user.email})")
+
+    # Map multiple results
+    users = session.select("SELECT * FROM users", schema_type=User)
+    for user in users:
+        print(f"User: {user.name}")
+```
+
+### Session Methods Overview
+
+SQLSpec provides several convenient methods for executing queries:
+
+```python
+with db_manager.provide_session(config) as session:
+    # Execute any SQL and get full result set
+    result = session.execute("SELECT * FROM users")
+
+    # Get single row (raises error if not found)
+    user = session.select_one("SELECT * FROM users WHERE id = ?", 1)
+
+    # Get single row or None (no error if not found)
+    maybe_user = session.select_one_or_none("SELECT * FROM users WHERE id = ?", 999)
+
+    # Execute with many parameter sets (bulk operations)
+    session.execute_many(
+        "INSERT INTO users (name, email) VALUES (?, ?)",
+        [("Bob", "bob@example.com"), ("Carol", "carol@example.com")]
+    )
+
+    # Execute multiple statements as a script
+    session.execute_script("""
+        CREATE TABLE IF NOT EXISTS logs (id INTEGER, message TEXT);
+        INSERT INTO logs (message) VALUES ('System started');
+    """)
 ```
 
-
+<details>
+<summary>🦆 DuckDB LLM Integration Example</summary>
 
 This is a quick implementation using some of the built-in Secret and Extension management features of SQLSpec's DuckDB integration.
 
-It allows you to communicate with any compatible
+It allows you to communicate with any compatible OpenAI conversations endpoint (such as Ollama). This example:
 
 - auto installs the `open_prompt` DuckDB extensions
 - automatically creates the correct `open_prompt` compatible secret required to use the extension
@@ -193,11 +262,12 @@ from pydantic import BaseModel
 class ChatMessage(BaseModel):
     message: str
 
-
-
-
-
-
+db_manager = SQLSpec()
+config = DuckDBConfig(
+    pool_config={"database": ":memory:"},
+    driver_features={
+        "extensions": [{"name": "open_prompt"}],
+        "secrets": [
             {
                 "secret_type": "open_prompt",
                 "name": "open_prompt",
@@ -208,9 +278,11 @@ etl_config = sql.add_config(
                 },
             }
         ],
-
+    },
 )
-
+db_manager.add_config(config)
+
+with db_manager.provide_session(config) as session:
     result = session.select_one(
         "SELECT open_prompt(?)",
         "Can you write a haiku about DuckDB?",
@@ -219,7 +291,10 @@ with sql.provide_session(etl_config) as session:
     print(result)  # result is a ChatMessage pydantic model
 ```
 
-
+</details>
+
+<details>
+<summary>🔗 DuckDB Gemini Embeddings Example</summary>
 
 In this example, we are again using DuckDB. However, we are going to use the built-in to call the Google Gemini embeddings service directly from the database.
 
@@ -246,11 +321,12 @@ API_URL = (
     f"https://generativelanguage.googleapis.com/v1beta/models/{EMBEDDING_MODEL}:embedContent?key=${GOOGLE_API_KEY}"
 )
 
-
-
-
-
-
+db_manager = SQLSpec()
+config = DuckDBConfig(
+    pool_config={"database": ":memory:"},
+    driver_features={
+        "extensions": [{"name": "vss"}, {"name": "http_client"}],
+        "on_connection_create": lambda connection: connection.execute(f"""
        CREATE IF NOT EXISTS MACRO generate_embedding(q) AS (
            WITH __request AS (
                SELECT http_post(
@@ -269,16 +345,77 @@ etl_config = sql.add_config(
            FROM __request,
        );
    """),
-
+    },
 )
-
+db_manager.add_config(config)
+
+with db_manager.provide_session(config) as session:
     result = session.execute("SELECT generate_embedding('example text')")
     print(result.get_first())  # result is a dictionary when `schema_type` is omitted.
 ```
 
+</details>
+
+### SQL File Loading
+
+SQLSpec can load and manage SQL queries from files using aiosql-style named queries:
+
+```python
+from sqlspec import SQLSpec
+from sqlspec.loader import SQLFileLoader
+from sqlspec.adapters.sqlite import SqliteConfig
+
+# Initialize with SQL file loader
+db_manager = SQLSpec(loader=SQLFileLoader())
+config = SqliteConfig(pool_config={"database": ":memory:"})
+db_manager.add_config(config)
+
+# Load SQL files from directory
+db_manager.load_sql_files("./sql")
+
+# SQL file: ./sql/users.sql
+# -- name: get_user
+# SELECT * FROM users WHERE id = ?
+#
+# -- name: create_user
+# INSERT INTO users (name, email) VALUES (?, ?)
+
+with db_manager.provide_session(config) as session:
+    # Use named queries from files
+    user = session.execute(db_manager.get_sql("get_user"), 1)
+    session.execute(db_manager.get_sql("create_user"), "Alice", "alice@example.com")
+```
+
+### Database Migrations
+
+SQLSpec includes a built-in migration system for managing schema changes. After configuring your database with migration settings, use the CLI commands:
+
+```bash
+# Initialize migration directory
+sqlspec db init migrations
+
+# Generate new migration file
+sqlspec db make-migrations "Add user table"
+
+# Apply all pending migrations
+sqlspec db upgrade
+
+# Show current migration status
+sqlspec db show-current-revision
+```
+
+For Litestar applications, replace `sqlspec` with your application command:
+
+```bash
+# Using Litestar CLI integration
+litestar db make-migrations "Add user table"
+litestar db upgrade
+litestar db show-current-revision
+```
+
 ### Basic Litestar Integration
 
-In this example we
+In this example we demonstrate how to create a basic configuration that integrates into Litestar:
 
 ```py
 # /// script
@@ -301,7 +438,7 @@ async def simple_sqlite(db_session: AiosqliteDriver) -> dict[str, str]:
 
 sqlspec = SQLSpec(
     config=DatabaseConfig(
-        config=AiosqliteConfig(),
+        config=AiosqliteConfig(pool_config={"database": ":memory:"}),  # built in local pooling
         commit_mode="autocommit"
     )
 )
@@ -320,6 +457,41 @@ The primary goal at this stage is to establish a **native connectivity interface
 
 This list is not final. If you have a driver you'd like to see added, please open an issue or submit a PR!
 
+### Configuration Examples
+
+Each adapter uses a consistent configuration pattern with `pool_config` for connection parameters:
+
+```python
+# SQLite
+SqliteConfig(pool_config={"database": "/path/to/database.db"})
+AiosqliteConfig(pool_config={"database": "/path/to/database.db"})  # Async
+AdbcConfig(connection_config={"uri": "sqlite:///path/to/database.db"})  # ADBC
+
+# PostgreSQL (multiple drivers available)
+PsycopgSyncConfig(pool_config={"host": "localhost", "database": "mydb", "user": "user", "password": "pass"})
+PsycopgAsyncConfig(pool_config={"host": "localhost", "database": "mydb", "user": "user", "password": "pass"})  # Async
+AsyncpgConfig(pool_config={"host": "localhost", "database": "mydb", "user": "user", "password": "pass"})
+PsqlpyConfig(pool_config={"dsn": "postgresql://user:pass@localhost/mydb"})
+AdbcConfig(connection_config={"uri": "postgresql://user:pass@localhost/mydb"})  # ADBC
+
+# DuckDB
+DuckDBConfig(pool_config={"database": ":memory:"})  # or file path
+AdbcConfig(connection_config={"uri": "duckdb:///path/to/database.duckdb"})  # ADBC
+
+# MySQL
+AsyncmyConfig(pool_config={"host": "localhost", "database": "mydb", "user": "user", "password": "pass"})  # Async
+
+# Oracle
+OracleSyncConfig(pool_config={"host": "localhost", "service_name": "XEPDB1", "user": "user", "password": "pass"})
+OracleAsyncConfig(pool_config={"host": "localhost", "service_name": "XEPDB1", "user": "user", "password": "pass"})  # Async
+
+# BigQuery
+BigQueryConfig(pool_config={"project": "my-project", "dataset": "my_dataset"})
+AdbcConfig(connection_config={"driver_name": "adbc_driver_bigquery", "project_id": "my-project", "dataset_id": "my_dataset"})  # ADBC
+```
+
+### Supported Drivers
+
 | Driver | Database | Mode | Status |
 | :----------------------------------------------------------------------------------------------------------- | :--------- | :------ | :--------- |
 | [`adbc`](https://arrow.apache.org/adbc/) | Postgres | Sync | ✅ |
@@ -342,21 +514,35 @@ This list is not final. If you have a driver you'd like to see added, please ope
 | [`asyncmy`](https://github.com/long2ice/asyncmy) | MySQL | Async | ✅ |
 | [`snowflake`](https://docs.snowflake.com) | Snowflake | Sync | 🗓️ |
 
-##
+## Project Structure
 
 - `sqlspec/`:
-  - `adapters/`:
-  - `extensions/`:
-    - `litestar/`: Litestar framework integration ✅
-    - `
-    -
-
-
-  - `
-
-
-
+  - `adapters/`: Database-specific drivers and configuration classes for all supported databases
+  - `extensions/`: Framework integrations and external library adapters
+    - `litestar/`: Litestar web framework integration with dependency injection ✅
+    - `aiosql/`: Integration with aiosql for SQL file loading ✅
+    - Future integrations: `fastapi/`, `flask/`, etc.
+  - `builder/`: Fluent SQL query builder with method chaining and type safety
+    - `mixins/`: Composable query building operations (WHERE, JOIN, ORDER BY, etc.)
+  - `core/`: Core query processing infrastructure
+    - `statement.py`: SQL statement wrapper with metadata and type information
+    - `parameters.py`: Parameter style conversion and validation
+    - `result.py`: Result set handling and type mapping
+    - `compiler.py`: SQL compilation and validation using SQLGlot
+    - `cache.py`: Statement caching for performance optimization
+  - `driver/`: Base driver system with sync/async support and transaction management
+    - `mixins/`: Shared driver capabilities (result processing, SQL translation)
+  - `migrations/`: Database migration system with CLI commands
+  - `storage/`: Unified data import/export operations with multiple backends
+    - `backends/`: Storage backend implementations (fsspec, obstore)
+  - `utils/`: Utility functions, type guards, and helper tools
+  - `base.py`: Main SQLSpec registry and configuration manager
+  - `loader.py`: SQL file loading system for `.sql` files
+  - `cli.py`: Command-line interface for migrations and database operations
+  - `config.py`: Base configuration classes and protocols
+  - `protocols.py`: Type protocols for runtime type checking
+  - `exceptions.py`: Custom exception hierarchy for SQLSpec
+  - `typing.py`: Type definitions, guards, and optional dependency facades
 
 ## Get Involved
 
{sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info}/RECORD
CHANGED

@@ -93,7 +93,7 @@ sqlspec/driver/_async.py,sha256=aS5AwY5IYqnVT8ldDLDwz2AMDN94CI9hfsOz-1k-Nus,1899
 sqlspec/driver/_common.py,sha256=Fi5NCy5_OVlRKDzUpGsLJn3zDmrsVSsXggMRndIMM1E,23879
 sqlspec/driver/_sync.py,sha256=wCBV9QfAH8BPjrrVCQc2eM90ai5-FYbKDd81L5sZMS0,18767
 sqlspec/driver/mixins/__init__.py,sha256=gN4pQyJXxNy0xi91dcMJGA7DQ7TbjGjQI24SSpZc6Go,248
-sqlspec/driver/mixins/_result_tools.py,sha256=
+sqlspec/driver/mixins/_result_tools.py,sha256=0LquMpoLBJrqkxaAPU4Wvn2JElyqdwRAnRlwGTX-D7w,8603
 sqlspec/driver/mixins/_sql_translator.py,sha256=TACtUUJdx8tJwuq_7g3AR_k0bKokvuJrMEwINyWwdQM,3711
 sqlspec/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 sqlspec/extensions/aiosql/__init__.py,sha256=-9cefc9pYPf9vCgALoB-y1DtmcgRjKe2azfl6RIarAA,414
@@ -101,9 +101,9 @@ sqlspec/extensions/aiosql/adapter.py,sha256=WshAQkpNJQ9zCMcL7EuAc6axl90GwRp3oBZ4
 sqlspec/extensions/litestar/__init__.py,sha256=tOmQ7RHSWOot7p30gk0efxxuP0OCq1opyyZqNmQY7FE,320
 sqlspec/extensions/litestar/_utils.py,sha256=iaicqnnkC5CuDwJKStz0T7lFaYMrgR96SYKZpe71v2g,1950
 sqlspec/extensions/litestar/cli.py,sha256=X4DlAx3Ry-ccOjAQSxe8SMtyJKCFJVLTbENPU_efKuU,1356
-sqlspec/extensions/litestar/config.py,sha256=
+sqlspec/extensions/litestar/config.py,sha256=AXAQsjc_tb88wk31d4v31e_6oOtDKz9DS0NA7wg_3Q8,12414
 sqlspec/extensions/litestar/handlers.py,sha256=DXYO1FUOmG3YE4E7RlxWCPNl8YgbUzVO8pHgwZeuDOw,10625
-sqlspec/extensions/litestar/plugin.py,sha256=
+sqlspec/extensions/litestar/plugin.py,sha256=4G_r3lna1TClWXnYXJwAYAH52z_t9cbrbLBn83Utmpg,15974
 sqlspec/extensions/litestar/providers.py,sha256=5LRb5JvRV_XZdNOKkdaIy3j5x-dFCcAi1ea1pgwuapI,18882
 sqlspec/migrations/__init__.py,sha256=RiDi_HkUIgXtu_33QnRdvYNqcCn-euHUiWwTiPr5IGc,1055
 sqlspec/migrations/base.py,sha256=vIzQzUtQrNKDec6XUeRHcCBuWU1KNtRCFpOvVxsp3sQ,13093
@@ -121,6 +121,7 @@ sqlspec/storage/backends/fsspec.py,sha256=8AX8ULwlApMd6WtHDVJBomdvk5UjPtfMf78TJ8
 sqlspec/storage/backends/obstore.py,sha256=v9moaiSVKbpl9MOrb9AAhhSXfQb1ifammrT8WeV1xcw,19735
 sqlspec/utils/__init__.py,sha256=cNFX26-bLyZTyTfujUitfDkUy1CeG_d-EIr8kZ0z4W8,474
 sqlspec/utils/correlation.py,sha256=2jvkAY3nkU3UxNU_9pbBR6cz3A1Q1cGG9IaWSSOIb1Q,4195
+sqlspec/utils/data_transformation.py,sha256=U37zyxR4f5PxsxKdC7QzcMyJxfqpsXUxgH_ch5l3PbY,3951
 sqlspec/utils/deprecation.py,sha256=iy7xzws6Kx0oQpX94smyZzTY6ijdODrdSEFFEXZfp5o,3980
 sqlspec/utils/fixtures.py,sha256=qnPAdkV91dyKOqslm_TH5UZ8mx4koQMwriE5YPn3PgI,9425
 sqlspec/utils/logging.py,sha256=zAM7rHJ-KsmAj1yjvU9QFoiwf4Q2hKTere2J62FlllI,3664
@@ -128,11 +129,11 @@ sqlspec/utils/module_loader.py,sha256=rO4ht-fUSJ3Us7L_7fb_G9bdMCoUSABGUA0pc3ouh9
 sqlspec/utils/serializers.py,sha256=GXsTkJbWAhRS7xDMk6WBouZwPeG4sI_brLdMBlIetNg,318
 sqlspec/utils/singleton.py,sha256=-j-s6LS0pP_wTEUYIyK2wSdoeIE_tn7O7B-j7_aODRQ,1252
 sqlspec/utils/sync_tools.py,sha256=ksfxsvFb1hLrDlxzwdW44OvYgRB0Fr5JDqxswfHwoOs,8744
-sqlspec/utils/text.py,sha256=
-sqlspec/utils/type_guards.py,sha256=
-sqlspec-0.
-sqlspec-0.
-sqlspec-0.
-sqlspec-0.
-sqlspec-0.
-sqlspec-0.
+sqlspec/utils/text.py,sha256=ZqaXCVuUbdj_110pdTYjmAxfV3ZtR7J6EixuNazQLFY,3333
+sqlspec/utils/type_guards.py,sha256=ktXwBQLLqOvk1W2wJcmk3bUprrsegs8nAZ879qDe0AU,32880
+sqlspec-0.21.1.dist-info/METADATA,sha256=diZWPfeC58LVHYhjp1WTNH1JOPqkrJZLq3ZwdS5nKVU,23548
+sqlspec-0.21.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sqlspec-0.21.1.dist-info/entry_points.txt,sha256=G-ZqY1Nuuw3Iys7nXw23f6ILenk_Lt47VdK2mhJCWHg,53
+sqlspec-0.21.1.dist-info/licenses/LICENSE,sha256=MdujfZ6l5HuLz4mElxlu049itenOR3gnhN1_Nd3nVcM,1078
+sqlspec-0.21.1.dist-info/licenses/NOTICE,sha256=Lyir8ozXWov7CyYS4huVaOCNrtgL17P-bNV-5daLntQ,1634
+sqlspec-0.21.1.dist-info/RECORD,,
The remaining files in {sqlspec-0.20.0.dist-info → sqlspec-0.21.1.dist-info} — WHEEL, entry_points.txt, licenses/LICENSE, and licenses/NOTICE — are unchanged between the two versions.