sqlspec 0.25.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sqlspec might be problematic.
- sqlspec/__init__.py +7 -15
- sqlspec/_serialization.py +256 -24
- sqlspec/_typing.py +71 -52
- sqlspec/adapters/adbc/_types.py +1 -1
- sqlspec/adapters/adbc/adk/__init__.py +5 -0
- sqlspec/adapters/adbc/adk/store.py +870 -0
- sqlspec/adapters/adbc/config.py +69 -12
- sqlspec/adapters/adbc/data_dictionary.py +340 -0
- sqlspec/adapters/adbc/driver.py +266 -58
- sqlspec/adapters/adbc/litestar/__init__.py +5 -0
- sqlspec/adapters/adbc/litestar/store.py +504 -0
- sqlspec/adapters/adbc/type_converter.py +153 -0
- sqlspec/adapters/aiosqlite/_types.py +1 -1
- sqlspec/adapters/aiosqlite/adk/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/adk/store.py +527 -0
- sqlspec/adapters/aiosqlite/config.py +88 -15
- sqlspec/adapters/aiosqlite/data_dictionary.py +149 -0
- sqlspec/adapters/aiosqlite/driver.py +143 -40
- sqlspec/adapters/aiosqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/aiosqlite/litestar/store.py +281 -0
- sqlspec/adapters/aiosqlite/pool.py +7 -7
- sqlspec/adapters/asyncmy/__init__.py +7 -1
- sqlspec/adapters/asyncmy/_types.py +2 -2
- sqlspec/adapters/asyncmy/adk/__init__.py +5 -0
- sqlspec/adapters/asyncmy/adk/store.py +493 -0
- sqlspec/adapters/asyncmy/config.py +68 -23
- sqlspec/adapters/asyncmy/data_dictionary.py +161 -0
- sqlspec/adapters/asyncmy/driver.py +313 -58
- sqlspec/adapters/asyncmy/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncmy/litestar/store.py +296 -0
- sqlspec/adapters/asyncpg/__init__.py +2 -1
- sqlspec/adapters/asyncpg/_type_handlers.py +71 -0
- sqlspec/adapters/asyncpg/_types.py +11 -7
- sqlspec/adapters/asyncpg/adk/__init__.py +5 -0
- sqlspec/adapters/asyncpg/adk/store.py +450 -0
- sqlspec/adapters/asyncpg/config.py +59 -35
- sqlspec/adapters/asyncpg/data_dictionary.py +173 -0
- sqlspec/adapters/asyncpg/driver.py +170 -25
- sqlspec/adapters/asyncpg/litestar/__init__.py +5 -0
- sqlspec/adapters/asyncpg/litestar/store.py +253 -0
- sqlspec/adapters/bigquery/_types.py +1 -1
- sqlspec/adapters/bigquery/adk/__init__.py +5 -0
- sqlspec/adapters/bigquery/adk/store.py +576 -0
- sqlspec/adapters/bigquery/config.py +27 -10
- sqlspec/adapters/bigquery/data_dictionary.py +149 -0
- sqlspec/adapters/bigquery/driver.py +368 -142
- sqlspec/adapters/bigquery/litestar/__init__.py +5 -0
- sqlspec/adapters/bigquery/litestar/store.py +327 -0
- sqlspec/adapters/bigquery/type_converter.py +125 -0
- sqlspec/adapters/duckdb/_types.py +1 -1
- sqlspec/adapters/duckdb/adk/__init__.py +14 -0
- sqlspec/adapters/duckdb/adk/store.py +553 -0
- sqlspec/adapters/duckdb/config.py +80 -20
- sqlspec/adapters/duckdb/data_dictionary.py +163 -0
- sqlspec/adapters/duckdb/driver.py +167 -45
- sqlspec/adapters/duckdb/litestar/__init__.py +5 -0
- sqlspec/adapters/duckdb/litestar/store.py +332 -0
- sqlspec/adapters/duckdb/pool.py +4 -4
- sqlspec/adapters/duckdb/type_converter.py +133 -0
- sqlspec/adapters/oracledb/_numpy_handlers.py +133 -0
- sqlspec/adapters/oracledb/_types.py +20 -2
- sqlspec/adapters/oracledb/adk/__init__.py +5 -0
- sqlspec/adapters/oracledb/adk/store.py +1745 -0
- sqlspec/adapters/oracledb/config.py +122 -32
- sqlspec/adapters/oracledb/data_dictionary.py +509 -0
- sqlspec/adapters/oracledb/driver.py +353 -91
- sqlspec/adapters/oracledb/litestar/__init__.py +5 -0
- sqlspec/adapters/oracledb/litestar/store.py +767 -0
- sqlspec/adapters/oracledb/migrations.py +348 -73
- sqlspec/adapters/oracledb/type_converter.py +207 -0
- sqlspec/adapters/psqlpy/_type_handlers.py +44 -0
- sqlspec/adapters/psqlpy/_types.py +2 -1
- sqlspec/adapters/psqlpy/adk/__init__.py +5 -0
- sqlspec/adapters/psqlpy/adk/store.py +482 -0
- sqlspec/adapters/psqlpy/config.py +46 -17
- sqlspec/adapters/psqlpy/data_dictionary.py +172 -0
- sqlspec/adapters/psqlpy/driver.py +123 -209
- sqlspec/adapters/psqlpy/litestar/__init__.py +5 -0
- sqlspec/adapters/psqlpy/litestar/store.py +272 -0
- sqlspec/adapters/psqlpy/type_converter.py +102 -0
- sqlspec/adapters/psycopg/_type_handlers.py +80 -0
- sqlspec/adapters/psycopg/_types.py +2 -1
- sqlspec/adapters/psycopg/adk/__init__.py +5 -0
- sqlspec/adapters/psycopg/adk/store.py +944 -0
- sqlspec/adapters/psycopg/config.py +69 -35
- sqlspec/adapters/psycopg/data_dictionary.py +331 -0
- sqlspec/adapters/psycopg/driver.py +238 -81
- sqlspec/adapters/psycopg/litestar/__init__.py +5 -0
- sqlspec/adapters/psycopg/litestar/store.py +554 -0
- sqlspec/adapters/sqlite/__init__.py +2 -1
- sqlspec/adapters/sqlite/_type_handlers.py +86 -0
- sqlspec/adapters/sqlite/_types.py +1 -1
- sqlspec/adapters/sqlite/adk/__init__.py +5 -0
- sqlspec/adapters/sqlite/adk/store.py +572 -0
- sqlspec/adapters/sqlite/config.py +87 -15
- sqlspec/adapters/sqlite/data_dictionary.py +149 -0
- sqlspec/adapters/sqlite/driver.py +137 -54
- sqlspec/adapters/sqlite/litestar/__init__.py +5 -0
- sqlspec/adapters/sqlite/litestar/store.py +318 -0
- sqlspec/adapters/sqlite/pool.py +18 -9
- sqlspec/base.py +45 -26
- sqlspec/builder/__init__.py +73 -4
- sqlspec/builder/_base.py +162 -89
- sqlspec/builder/_column.py +62 -29
- sqlspec/builder/_ddl.py +180 -121
- sqlspec/builder/_delete.py +5 -4
- sqlspec/builder/_dml.py +388 -0
- sqlspec/{_sql.py → builder/_factory.py} +53 -94
- sqlspec/builder/_insert.py +32 -131
- sqlspec/builder/_join.py +375 -0
- sqlspec/builder/_merge.py +446 -11
- sqlspec/builder/_parsing_utils.py +111 -17
- sqlspec/builder/_select.py +1457 -24
- sqlspec/builder/_update.py +11 -42
- sqlspec/cli.py +307 -194
- sqlspec/config.py +252 -67
- sqlspec/core/__init__.py +5 -4
- sqlspec/core/cache.py +17 -17
- sqlspec/core/compiler.py +62 -9
- sqlspec/core/filters.py +37 -37
- sqlspec/core/hashing.py +9 -9
- sqlspec/core/parameters.py +83 -48
- sqlspec/core/result.py +102 -46
- sqlspec/core/splitter.py +16 -17
- sqlspec/core/statement.py +36 -30
- sqlspec/core/type_conversion.py +235 -0
- sqlspec/driver/__init__.py +7 -6
- sqlspec/driver/_async.py +188 -151
- sqlspec/driver/_common.py +285 -80
- sqlspec/driver/_sync.py +188 -152
- sqlspec/driver/mixins/_result_tools.py +20 -236
- sqlspec/driver/mixins/_sql_translator.py +4 -4
- sqlspec/exceptions.py +75 -7
- sqlspec/extensions/adk/__init__.py +53 -0
- sqlspec/extensions/adk/_types.py +51 -0
- sqlspec/extensions/adk/converters.py +172 -0
- sqlspec/extensions/adk/migrations/0001_create_adk_tables.py +144 -0
- sqlspec/extensions/adk/migrations/__init__.py +0 -0
- sqlspec/extensions/adk/service.py +181 -0
- sqlspec/extensions/adk/store.py +536 -0
- sqlspec/extensions/aiosql/adapter.py +73 -53
- sqlspec/extensions/litestar/__init__.py +21 -4
- sqlspec/extensions/litestar/cli.py +54 -10
- sqlspec/extensions/litestar/config.py +59 -266
- sqlspec/extensions/litestar/handlers.py +46 -17
- sqlspec/extensions/litestar/migrations/0001_create_session_table.py +137 -0
- sqlspec/extensions/litestar/migrations/__init__.py +3 -0
- sqlspec/extensions/litestar/plugin.py +324 -223
- sqlspec/extensions/litestar/providers.py +25 -25
- sqlspec/extensions/litestar/store.py +265 -0
- sqlspec/loader.py +30 -49
- sqlspec/migrations/__init__.py +4 -3
- sqlspec/migrations/base.py +302 -39
- sqlspec/migrations/commands.py +611 -144
- sqlspec/migrations/context.py +142 -0
- sqlspec/migrations/fix.py +199 -0
- sqlspec/migrations/loaders.py +68 -23
- sqlspec/migrations/runner.py +543 -107
- sqlspec/migrations/tracker.py +237 -21
- sqlspec/migrations/utils.py +51 -3
- sqlspec/migrations/validation.py +177 -0
- sqlspec/protocols.py +66 -36
- sqlspec/storage/_utils.py +98 -0
- sqlspec/storage/backends/fsspec.py +134 -106
- sqlspec/storage/backends/local.py +78 -51
- sqlspec/storage/backends/obstore.py +278 -162
- sqlspec/storage/registry.py +75 -39
- sqlspec/typing.py +16 -84
- sqlspec/utils/config_resolver.py +153 -0
- sqlspec/utils/correlation.py +4 -5
- sqlspec/utils/data_transformation.py +3 -2
- sqlspec/utils/deprecation.py +9 -8
- sqlspec/utils/fixtures.py +4 -4
- sqlspec/utils/logging.py +46 -6
- sqlspec/utils/module_loader.py +2 -2
- sqlspec/utils/schema.py +288 -0
- sqlspec/utils/serializers.py +50 -2
- sqlspec/utils/sync_tools.py +21 -17
- sqlspec/utils/text.py +1 -2
- sqlspec/utils/type_guards.py +111 -20
- sqlspec/utils/version.py +433 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/METADATA +40 -21
- sqlspec-0.27.0.dist-info/RECORD +207 -0
- sqlspec/builder/mixins/__init__.py +0 -55
- sqlspec/builder/mixins/_cte_and_set_ops.py +0 -254
- sqlspec/builder/mixins/_delete_operations.py +0 -50
- sqlspec/builder/mixins/_insert_operations.py +0 -282
- sqlspec/builder/mixins/_join_operations.py +0 -389
- sqlspec/builder/mixins/_merge_operations.py +0 -592
- sqlspec/builder/mixins/_order_limit_operations.py +0 -152
- sqlspec/builder/mixins/_pivot_operations.py +0 -157
- sqlspec/builder/mixins/_select_operations.py +0 -936
- sqlspec/builder/mixins/_update_operations.py +0 -218
- sqlspec/builder/mixins/_where_clause.py +0 -1304
- sqlspec-0.25.0.dist-info/RECORD +0 -139
- sqlspec-0.25.0.dist-info/licenses/NOTICE +0 -29
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/WHEEL +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/entry_points.txt +0 -0
- {sqlspec-0.25.0.dist-info → sqlspec-0.27.0.dist-info}/licenses/LICENSE +0 -0
sqlspec/driver/mixins/_result_tools.py CHANGED

@@ -1,130 +1,15 @@
-# ruff: noqa: C901
 """Result handling and schema conversion mixins for database drivers."""

-import datetime
-import logging
-from collections.abc import Sequence
-from enum import Enum
-from functools import partial
-from pathlib import Path, PurePath
-from typing import Any, Callable, Final, Optional, overload
-from uuid import UUID
+from typing import TYPE_CHECKING, Any, overload

 from mypy_extensions import trait

-from sqlspec.exceptions import SQLSpecError
-from sqlspec.typing import (
-    CATTRS_INSTALLED,
-    NUMPY_INSTALLED,
-    ModelDTOT,
-    ModelT,
-    attrs_asdict,
-    cattrs_structure,
-    cattrs_unstructure,
-    convert,
-    get_type_adapter,
-)
-from sqlspec.utils.data_transformation import transform_dict_keys
-from sqlspec.utils.text import camelize, kebabize, pascalize
-from sqlspec.utils.type_guards import (
-    get_msgspec_rename_config,
-    is_attrs_schema,
-    is_dataclass,
-    is_dict,
-    is_msgspec_struct,
-    is_pydantic_model,
-)
+from sqlspec.utils.schema import to_schema

-
+if TYPE_CHECKING:
+    from sqlspec.typing import SchemaT

-
-logger = logging.getLogger(__name__)
-
-
-_DATETIME_TYPES: Final[set[type]] = {datetime.datetime, datetime.date, datetime.time}
-
-
-def _is_list_type_target(target_type: Any) -> bool:
-    """Check if target type is a list type (e.g., list[float])."""
-    try:
-        return hasattr(target_type, "__origin__") and target_type.__origin__ is list
-    except (AttributeError, TypeError):
-        return False
-
-
-def _convert_numpy_to_list(target_type: Any, value: Any) -> Any:
-    """Convert numpy array to list if target is a list type."""
-    if not NUMPY_INSTALLED:
-        return value
-
-    import numpy as np
-
-    if isinstance(value, np.ndarray) and _is_list_type_target(target_type):
-        return value.tolist()
-
-    return value
-
-
-_DEFAULT_TYPE_DECODERS: Final[list[tuple[Callable[[Any], bool], Callable[[Any, Any], Any]]]] = [
-    (lambda x: x is UUID, lambda t, v: t(v.hex)),
-    (lambda x: x is datetime.datetime, lambda t, v: t(v.isoformat())),
-    (lambda x: x is datetime.date, lambda t, v: t(v.isoformat())),
-    (lambda x: x is datetime.time, lambda t, v: t(v.isoformat())),
-    (lambda x: x is Enum, lambda t, v: t(v.value)),
-    (_is_list_type_target, _convert_numpy_to_list),
-]
-
-
-def _default_msgspec_deserializer(
-    target_type: Any, value: Any, type_decoders: "Optional[Sequence[tuple[Any, Any]]]" = None
-) -> Any:
-    """Convert msgspec types with type decoder support.
-
-    Args:
-        target_type: Type to convert to
-        value: Value to convert
-        type_decoders: Optional sequence of (predicate, decoder) pairs
-
-    Returns:
-        Converted value or original value if conversion not applicable
-    """
-    # Handle numpy arrays first for list types
-    if NUMPY_INSTALLED:
-        import numpy as np
-
-        if isinstance(value, np.ndarray) and _is_list_type_target(target_type):
-            return value.tolist()
-
-    if type_decoders:
-        for predicate, decoder in type_decoders:
-            if predicate(target_type):
-                return decoder(target_type, value)
-
-    if target_type is UUID and isinstance(value, UUID):
-        return value.hex
-
-    if target_type in _DATETIME_TYPES and hasattr(value, "isoformat"):
-        return value.isoformat()  # pyright: ignore
-
-    if isinstance(target_type, type) and issubclass(target_type, Enum) and isinstance(value, Enum):
-        return value.value
-
-    # Check if value is already the correct type (but avoid parameterized generics)
-    try:
-        if isinstance(target_type, type) and isinstance(value, target_type):
-            return value
-    except TypeError:
-        # Handle parameterized generics like list[int] which can't be used with isinstance
-        pass
-
-    if isinstance(target_type, type):
-        try:
-            if issubclass(target_type, (Path, PurePath)) or issubclass(target_type, UUID):
-                return target_type(str(value))
-        except (TypeError, ValueError):
-            pass
-
-    return value
+__all__ = ("ToSchemaMixin",)


 @trait
@@ -135,143 +20,42 @@ class ToSchemaMixin:

     @overload
     @staticmethod
-    def to_schema(data: "list[dict[str, Any]]") -> "list[
-    @overload
-    @staticmethod
-    def to_schema(data: "list[dict[str, Any]]", *, schema_type: "type[ModelDTOT]") -> "list[ModelDTOT]": ...
+    def to_schema(data: "list[dict[str, Any]]", *, schema_type: "type[SchemaT]") -> "list[SchemaT]": ...
     @overload
     @staticmethod
     def to_schema(data: "list[dict[str, Any]]", *, schema_type: None = None) -> "list[dict[str, Any]]": ...
     @overload
     @staticmethod
-    def to_schema(data: "dict[str, Any]"
-    @overload
-    @staticmethod
-    def to_schema(data: "dict[str, Any]", *, schema_type: "type[ModelDTOT]") -> "ModelDTOT": ...
+    def to_schema(data: "dict[str, Any]", *, schema_type: "type[SchemaT]") -> "SchemaT": ...
     @overload
     @staticmethod
     def to_schema(data: "dict[str, Any]", *, schema_type: None = None) -> "dict[str, Any]": ...
     @overload
     @staticmethod
-    def to_schema(data: "
-    @overload
-    @staticmethod
-    def to_schema(data: "list[ModelT]", *, schema_type: "type[ModelDTOT]") -> "list[ModelDTOT]": ...
-    @overload
-    @staticmethod
-    def to_schema(data: "list[ModelT]", *, schema_type: None = None) -> "list[ModelT]": ...
-    @overload
-    @staticmethod
-    def to_schema(data: "ModelT") -> "ModelT": ...
+    def to_schema(data: Any, *, schema_type: "type[SchemaT]") -> Any: ...
     @overload
     @staticmethod
     def to_schema(data: Any, *, schema_type: None = None) -> Any: ...

     @staticmethod
-    def to_schema(data: Any, *, schema_type: "
+    def to_schema(data: Any, *, schema_type: "type[Any] | None" = None) -> Any:
         """Convert data to a specified schema type.

+        Supports transformation to various schema types including:
+        - TypedDict
+        - dataclasses
+        - msgspec Structs
+        - Pydantic models
+        - attrs classes
+
         Args:
-            data: Input data to convert
-            schema_type: Target schema type for conversion
+            data: Input data to convert (dict, list of dicts, or other)
+            schema_type: Target schema type for conversion. If None, returns data unchanged.

         Returns:
-            Converted data in the specified schema type
+            Converted data in the specified schema type, or original data if schema_type is None

         Raises:
             SQLSpecError: If schema_type is not a supported type
         """
-        if schema_type is None:
-            return data
-        if is_dataclass(schema_type):
-            if isinstance(data, list):
-                result: list[Any] = []
-                for item in data:
-                    if is_dict(item):
-                        result.append(schema_type(**dict(item)))  # type: ignore[operator]
-                    else:
-                        result.append(item)
-                return result
-            if is_dict(data):
-                return schema_type(**dict(data))  # type: ignore[operator]
-            if isinstance(data, dict):
-                return schema_type(**data)  # type: ignore[operator]
-            return data
-        if is_msgspec_struct(schema_type):
-            rename_config = get_msgspec_rename_config(schema_type)  # type: ignore[arg-type]
-            deserializer = partial(_default_msgspec_deserializer, type_decoders=_DEFAULT_TYPE_DECODERS)
-
-            # Transform field names if rename configuration exists
-            transformed_data = data
-            if (rename_config and is_dict(data)) or (isinstance(data, Sequence) and data and is_dict(data[0])):
-                try:
-                    converter = None
-                    if rename_config == "camel":
-                        converter = camelize
-                    elif rename_config == "kebab":
-                        converter = kebabize
-                    elif rename_config == "pascal":
-                        converter = pascalize
-
-                    if converter is not None:
-                        if isinstance(data, Sequence):
-                            transformed_data = [
-                                transform_dict_keys(item, converter) if is_dict(item) else item for item in data
-                            ]
-                        else:
-                            transformed_data = transform_dict_keys(data, converter) if is_dict(data) else data
-                except Exception as e:
-                    logger.debug("Field name transformation failed for msgspec schema: %s", e)
-                    transformed_data = data
-
-            # Pre-process numpy arrays to lists before msgspec conversion
-            if NUMPY_INSTALLED:
-                try:
-                    import numpy as np
-
-                    def _convert_numpy_arrays_in_data(obj: Any) -> Any:
-                        """Recursively convert numpy arrays to lists in data structures."""
-                        if isinstance(obj, np.ndarray):
-                            return obj.tolist()
-                        if isinstance(obj, dict):
-                            return {k: _convert_numpy_arrays_in_data(v) for k, v in obj.items()}
-                        if isinstance(obj, (list, tuple)):
-                            return type(obj)(_convert_numpy_arrays_in_data(item) for item in obj)
-                        return obj
-
-                    transformed_data = _convert_numpy_arrays_in_data(transformed_data)
-                except ImportError:
-                    pass
-
-            if not isinstance(transformed_data, Sequence):
-                return convert(obj=transformed_data, type=schema_type, from_attributes=True, dec_hook=deserializer)
-            return convert(obj=transformed_data, type=list[schema_type], from_attributes=True, dec_hook=deserializer)  # type: ignore[valid-type]
-        if is_pydantic_model(schema_type):
-            if not isinstance(data, Sequence):
-                adapter = get_type_adapter(schema_type)
-                return adapter.validate_python(data, from_attributes=True)
-            list_adapter = get_type_adapter(list[schema_type])  # type: ignore[valid-type]
-            return list_adapter.validate_python(data, from_attributes=True)
-        if is_attrs_schema(schema_type):
-            if CATTRS_INSTALLED:
-                if isinstance(data, Sequence):
-                    return cattrs_structure(data, list[schema_type])  # type: ignore[valid-type]
-                if hasattr(data, "__attrs_attrs__"):
-                    unstructured_data = cattrs_unstructure(data)
-                    return cattrs_structure(unstructured_data, schema_type)
-                return cattrs_structure(data, schema_type)
-            if isinstance(data, list):
-                attrs_result: list[Any] = []
-                for item in data:
-                    if hasattr(item, "keys"):
-                        attrs_result.append(schema_type(**dict(item)))
-                    else:
-                        attrs_result.append(schema_type(**attrs_asdict(item)))
-                return attrs_result
-            if hasattr(data, "keys"):
-                return schema_type(**dict(data))
-            if isinstance(data, dict):
-                return schema_type(**data)
-            return data
-        msg = "`schema_type` should be a valid Dataclass, Pydantic model, Msgspec struct, or Attrs class"
-        raise SQLSpecError(msg)
+        return to_schema(data, schema_type=schema_type)
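One note on the _result_tools.py hunks above: the mixin no longer carries its own conversion logic and delegates to sqlspec.utils.schema.to_schema, with the overloads expressed through a single SchemaT type variable. A minimal sketch of that call, based only on the signatures and docstring shown in this diff; UserRow is a hypothetical example type, not part of sqlspec:

    from dataclasses import dataclass

    from sqlspec.utils.schema import to_schema

    @dataclass
    class UserRow:  # hypothetical schema type, used only for illustration
        id: int
        name: str

    row = {"id": 1, "name": "Ada"}
    user = to_schema(row, schema_type=UserRow)          # one dict row -> one UserRow
    users = to_schema([row, row], schema_type=UserRow)  # list of dict rows -> list[UserRow]
    unchanged = to_schema(row, schema_type=None)        # schema_type=None returns the input unchanged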
sqlspec/driver/mixins/_sql_translator.py CHANGED

@@ -1,6 +1,6 @@
 """SQL translation mixin for cross-database compatibility."""

-from typing import Final, NoReturn
+from typing import Final, NoReturn

 from mypy_extensions import trait
 from sqlglot import exp, parse_one
@@ -20,10 +20,10 @@ class SQLTranslatorMixin:
     """Mixin for drivers supporting SQL translation."""

     __slots__ = ()
-    dialect: "
+    dialect: "DialectType | None"

     def convert_to_dialect(
-        self, statement: "Statement", to_dialect: "
+        self, statement: "Statement", to_dialect: "DialectType | None" = None, pretty: bool = _DEFAULT_PRETTY
     ) -> str:
         """Convert a statement to a target SQL dialect.

@@ -38,7 +38,7 @@ class SQLTranslatorMixin:

         """

-        parsed_expression:
+        parsed_expression: exp.Expression | None = None

         if statement is not None and isinstance(statement, SQL):
             if statement.expression is None:
sqlspec/exceptions.py CHANGED

@@ -1,14 +1,24 @@
 from collections.abc import Generator
 from contextlib import contextmanager
-from typing import Any
+from typing import Any

 __all__ = (
+    "CheckViolationError",
+    "ConfigResolverError",
+    "DataError",
+    "DatabaseConnectionError",
     "FileNotFoundInStorageError",
+    "ForeignKeyViolationError",
     "ImproperConfigurationError",
     "IntegrityError",
+    "InvalidVersionFormatError",
+    "MigrationError",
     "MissingDependencyError",
     "MultipleResultsFoundError",
     "NotFoundError",
+    "NotNullViolationError",
+    "OperationalError",
+    "OutOfOrderMigrationError",
     "RepositoryError",
     "SQLBuilderError",
     "SQLConversionError",
@@ -18,6 +28,8 @@ __all__ = (
     "SQLSpecError",
     "SerializationError",
     "StorageOperationFailedError",
+    "TransactionError",
+    "UniqueViolationError",
 )


@@ -54,7 +66,7 @@ class SQLSpecError(Exception):
 class MissingDependencyError(SQLSpecError, ImportError):
     """Raised when a required dependency is not installed."""

-    def __init__(self, package: str, install_package:
+    def __init__(self, package: str, install_package: str | None = None) -> None:
         super().__init__(
             f"Package {package!r} is not installed but required. You can install it by running "
             f"'pip install sqlspec[{install_package or package}]' to install sqlspec with the required extra "
@@ -69,10 +81,14 @@ class BackendNotRegisteredError(SQLSpecError):
         super().__init__(f"Storage backend '{backend_key}' is not registered. Please register it before use.")


+class ConfigResolverError(SQLSpecError):
+    """Exception raised when config resolution fails."""
+
+
 class SQLParsingError(SQLSpecError):
     """Issues parsing SQL statements."""

-    def __init__(self, message:
+    def __init__(self, message: str | None = None) -> None:
         if message is None:
             message = "Issues parsing SQL statement."
         super().__init__(message)
@@ -81,7 +97,7 @@ class SQLParsingError(SQLSpecError):
 class SQLBuilderError(SQLSpecError):
     """Issues Building or Generating SQL statements."""

-    def __init__(self, message:
+    def __init__(self, message: str | None = None) -> None:
         if message is None:
             message = "Issues building SQL statement."
         super().__init__(message)
@@ -90,7 +106,7 @@ class SQLBuilderError(SQLSpecError):
 class SQLConversionError(SQLSpecError):
     """Issues converting SQL statements."""

-    def __init__(self, message:
+    def __init__(self, message: str | None = None) -> None:
         if message is None:
             message = "Issues converting SQL statement."
         super().__init__(message)
@@ -120,6 +136,38 @@ class MultipleResultsFoundError(RepositoryError):
     """A single database result was required but more than one were found."""


+class UniqueViolationError(IntegrityError):
+    """A unique constraint was violated."""
+
+
+class ForeignKeyViolationError(IntegrityError):
+    """A foreign key constraint was violated."""
+
+
+class CheckViolationError(IntegrityError):
+    """A check constraint was violated."""
+
+
+class NotNullViolationError(IntegrityError):
+    """A not-null constraint was violated."""
+
+
+class DatabaseConnectionError(SQLSpecError):
+    """Database connection error (invalid credentials, network failure, etc.)."""
+
+
+class TransactionError(SQLSpecError):
+    """Transaction error (rollback, deadlock, serialization failure)."""
+
+
+class DataError(SQLSpecError):
+    """Invalid data type or format for database operation."""
+
+
+class OperationalError(SQLSpecError):
+    """Operational database error (timeout, disk full, resource limit)."""
+
+
 class StorageOperationFailedError(SQLSpecError):
     """Raised when a storage backend operation fails (e.g., network, permission, API error)."""

@@ -131,7 +179,7 @@ class FileNotFoundInStorageError(StorageOperationFailedError):
 class SQLFileNotFoundError(SQLSpecError):
     """Raised when a SQL file cannot be found."""

-    def __init__(self, name: str, path: "
+    def __init__(self, name: str, path: "str | None" = None) -> None:
         """Initialize the error.

         Args:
@@ -162,9 +210,29 @@ class SQLFileParseError(SQLSpecError):
         self.original_error = original_error


+class MigrationError(SQLSpecError):
+    """Base exception for migration-related errors."""
+
+
+class InvalidVersionFormatError(MigrationError):
+    """Raised when a migration version format is invalid.
+
+    Invalid formats include versions that don't match sequential (0001)
+    or timestamp (YYYYMMDDHHmmss) patterns, or timestamps with invalid dates.
+    """
+
+
+class OutOfOrderMigrationError(MigrationError):
+    """Raised when an out-of-order migration is detected in strict mode.
+
+    Out-of-order migrations occur when a pending migration has a timestamp
+    earlier than already-applied migrations, typically from late-merging branches.
+    """
+
+
 @contextmanager
 def wrap_exceptions(
-    wrap_exceptions: bool = True, suppress: "
+    wrap_exceptions: bool = True, suppress: "type[Exception] | tuple[type[Exception], ...] | None" = None
 ) -> Generator[None, None, None]:
     """Context manager for exception handling with optional suppression.

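The constraint-specific exceptions added to sqlspec/exceptions.py above subclass the existing IntegrityError, so broad pre-0.27 handlers keep working while new code can catch the narrower classes first. A small hedged sketch (it assumes these exceptions accept a plain message argument, as the SQLSpecError base does elsewhere in this file):

    from sqlspec.exceptions import IntegrityError, UniqueViolationError

    try:
        # Stand-in for a driver call that hits a duplicate key.
        raise UniqueViolationError("duplicate value for users.email")
    except UniqueViolationError:
        print("unique constraint hit; treat as a conflict")
    except IntegrityError:
        print("other constraint failure; the broad handler still catches the new subclasses")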
sqlspec/extensions/adk/__init__.py ADDED

@@ -0,0 +1,53 @@
+"""Google ADK session backend extension for SQLSpec.
+
+Provides session and event storage for Google Agent Development Kit using
+SQLSpec database adapters.
+
+Public API exports:
+- ADKConfig: TypedDict for extension config (type-safe configuration)
+- SQLSpecSessionService: Main service class implementing BaseSessionService
+- BaseAsyncADKStore: Base class for async database store implementations
+- BaseSyncADKStore: Base class for sync database store implementations
+- SessionRecord: TypedDict for session database records
+- EventRecord: TypedDict for event database records
+
+Example (with extension_config):
+    from sqlspec.adapters.asyncpg import AsyncpgConfig
+    from sqlspec.adapters.asyncpg.adk.store import AsyncpgADKStore
+    from sqlspec.extensions.adk import SQLSpecSessionService
+
+    config = AsyncpgConfig(
+        pool_config={"dsn": "postgresql://..."},
+        extension_config={
+            "adk": {
+                "session_table": "my_sessions",
+                "events_table": "my_events",
+                "owner_id_column": "tenant_id INTEGER REFERENCES tenants(id)"
+            }
+        }
+    )
+
+    store = AsyncpgADKStore(config)
+    await store.create_tables()
+
+    service = SQLSpecSessionService(store)
+    session = await service.create_session(
+        app_name="my_app",
+        user_id="user123",
+        state={"key": "value"}
+    )
+"""
+
+from sqlspec.config import ADKConfig
+from sqlspec.extensions.adk._types import EventRecord, SessionRecord
+from sqlspec.extensions.adk.service import SQLSpecSessionService
+from sqlspec.extensions.adk.store import BaseAsyncADKStore, BaseSyncADKStore
+
+__all__ = (
+    "ADKConfig",
+    "BaseAsyncADKStore",
+    "BaseSyncADKStore",
+    "EventRecord",
+    "SQLSpecSessionService",
+    "SessionRecord",
+)
sqlspec/extensions/adk/_types.py ADDED

@@ -0,0 +1,51 @@
+"""Type definitions for ADK extension.
+
+These types define the database record structures for storing sessions and events.
+They are separate from the Pydantic models to keep mypyc compilation working.
+"""
+
+from datetime import datetime
+from typing import Any, TypedDict
+
+__all__ = ("EventRecord", "SessionRecord")
+
+
+class SessionRecord(TypedDict):
+    """Database record for a session.
+
+    Represents the schema for sessions stored in the database.
+    """
+
+    id: str
+    app_name: str
+    user_id: str
+    state: "dict[str, Any]"
+    create_time: datetime
+    update_time: datetime
+
+
+class EventRecord(TypedDict):
+    """Database record for an event.
+
+    Represents the schema for events stored in the database.
+    Follows the ADK Event model plus session metadata.
+    """
+
+    id: str
+    app_name: str
+    user_id: str
+    session_id: str
+    invocation_id: str
+    author: str
+    branch: "str | None"
+    actions: bytes
+    long_running_tool_ids_json: "str | None"
+    timestamp: datetime
+    content: "dict[str, Any] | None"
+    grounding_metadata: "dict[str, Any] | None"
+    custom_metadata: "dict[str, Any] | None"
+    partial: "bool | None"
+    turn_complete: "bool | None"
+    interrupted: "bool | None"
+    error_code: "str | None"
+    error_message: "str | None"