sqlspec 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of sqlspec might be problematic. Click here for more details.
- sqlspec/__init__.py +1 -0
- sqlspec/__metadata__.py +18 -0
- sqlspec/_serialization.py +24 -0
- sqlspec/exceptions.py +74 -0
- sqlspec/filters.py +121 -0
- sqlspec/py.typed +0 -0
- sqlspec/types/__init__.py +0 -0
- sqlspec/types/empty.py +18 -0
- sqlspec/types/protocols.py +117 -0
- sqlspec/utils/__init__.py +0 -0
- sqlspec/utils/dataclass.py +130 -0
- sqlspec-0.1.0.dist-info/METADATA +25 -0
- sqlspec-0.1.0.dist-info/RECORD +14 -0
- sqlspec-0.1.0.dist-info/WHEEL +4 -0
sqlspec/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from __future__ import annotations
|
sqlspec/__metadata__.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Metadata for the Project."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from importlib.metadata import PackageNotFoundError, metadata, version
|
|
6
|
+
|
|
7
|
+
__all__ = ["__version__", "__project__"]
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
__version__ = version("sqlspec")
|
|
11
|
+
"""Version of the project."""
|
|
12
|
+
__project__ = metadata("sqlspec")["Name"]
|
|
13
|
+
"""Name of the project."""
|
|
14
|
+
except PackageNotFoundError: # pragma: no cover
|
|
15
|
+
__version__ = "0.0.1"
|
|
16
|
+
__project__ = "SQLSpec"
|
|
17
|
+
finally:
|
|
18
|
+
del version, PackageNotFoundError, metadata
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
from typing import Any

__all__ = ("decode_json", "encode_json")

# JSON codec selection: prefer msgspec, then orjson, then the stdlib.
# The nesting order is behavior — each ImportError falls through to the
# next backend, and all three expose the same (decode_json, encode_json)
# pair with ``encode_json`` always returning ``str``.
try:
    from msgspec.json import Decoder, Encoder  # pyright: ignore[reportMissingImports]

    encoder, decoder = Encoder(), Decoder()
    decode_json = decoder.decode

    def encode_json(data: Any) -> str:
        # msgspec encodes to ``bytes``; normalize to ``str`` so every
        # backend shares the same return type.
        return encoder.encode(data).decode("utf-8")

except ImportError:
    try:
        from orjson import dumps as _encode_json  # pyright: ignore[reportMissingImports]
        from orjson import loads as decode_json  # type: ignore[no-redef,assignment]

        def encode_json(data: Any) -> str:
            # orjson's ``dumps`` also returns ``bytes``; decode for parity.
            return _encode_json(data).decode("utf-8")  # type: ignore[no-any-return]

    except ImportError:
        # Stdlib fallback — always available; ``json.dumps`` already
        # returns ``str`` so no wrapper is needed.
        from json import dumps as encode_json  # type: ignore[assignment]
        from json import loads as decode_json  # type: ignore[assignment]
|
sqlspec/exceptions.py
ADDED
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class SQLSpecError(Exception):
    """Base exception class from which all SQLSpec exceptions inherit."""

    detail: str

    def __init__(self, *args: Any, detail: str = "") -> None:
        """Initialize ``SQLSpecError``.

        Args:
            *args: args are converted to :class:`str` before passing to :class:`Exception`
            detail: detail of the exception.
        """
        # Falsy positional args (None, "", 0, ...) are dropped deliberately.
        str_args = [str(arg) for arg in args if arg]
        if not detail:
            if str_args:
                # Promote the first positional argument to ``detail``.
                detail, *str_args = str_args
            elif hasattr(self, "detail"):
                # A subclass may define a class-level default ``detail``;
                # the bare annotation on this class sets no value itself.
                detail = self.detail
        self.detail = detail
        super().__init__(*str_args)

    def __repr__(self) -> str:
        if self.detail:
            return f"{self.__class__.__name__} - {self.detail}"
        return self.__class__.__name__

    def __str__(self) -> str:
        # ``self.args`` holds only the str-converted remainder, so joining
        # with ``detail`` and stripping yields a single readable message.
        return " ".join((*self.args, self.detail)).strip()
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class MissingDependencyError(SQLSpecError, ImportError):
    """Missing optional dependency.

    This exception is raised only when a module depends on a dependency that has not been installed.
    """

    # Also subclasses ImportError so callers can catch it as either type.
    def __init__(self, package: str, install_package: str | None = None) -> None:
        super().__init__(
            f"Package {package!r} is not installed but required. You can install it by running "
            f"'pip install sqlspec[{install_package or package}]' to install sqlspec with the required extra "
            f"or 'pip install {install_package or package}' to install the package separately",
        )


class ImproperConfigurationError(SQLSpecError):
    """Improper Configuration error.

    This exception is raised when SQLSpec is configured incorrectly.
    """


class SerializationError(SQLSpecError):
    """Encoding or decoding of an object failed."""


class RepositoryError(SQLSpecError):
    """Base repository exception type."""


class IntegrityError(RepositoryError):
    """Data integrity error."""


class NotFoundError(RepositoryError):
    """An identity does not exist."""


class MultipleResultsFoundError(RepositoryError):
    """A single database result was required but more than one were found."""
|
sqlspec/filters.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
"""Collection filter datastructures."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from collections import abc # noqa: TCH003
|
|
6
|
+
from dataclasses import dataclass
|
|
7
|
+
from datetime import datetime # noqa: TCH003
|
|
8
|
+
from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from typing_extensions import TypeAlias
|
|
12
|
+
|
|
13
|
+
T = TypeVar("T")
|
|
14
|
+
|
|
15
|
+
__all__ = (
|
|
16
|
+
"BeforeAfter",
|
|
17
|
+
"CollectionFilter",
|
|
18
|
+
"FilterTypes",
|
|
19
|
+
"LimitOffset",
|
|
20
|
+
"NotInCollectionFilter",
|
|
21
|
+
"NotInSearchFilter",
|
|
22
|
+
"OnBeforeAfter",
|
|
23
|
+
"OrderBy",
|
|
24
|
+
"SearchFilter",
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
FilterTypes: TypeAlias = "BeforeAfter | OnBeforeAfter | CollectionFilter[Any] | LimitOffset | OrderBy | SearchFilter | NotInCollectionFilter[Any] | NotInSearchFilter"
|
|
29
|
+
"""Aggregate type alias of the types supported for collection filtering."""
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
|
|
33
|
+
class BeforeAfter:
|
|
34
|
+
"""Data required to filter a query on a ``datetime`` column."""
|
|
35
|
+
|
|
36
|
+
field_name: str
|
|
37
|
+
"""Name of the model attribute to filter on."""
|
|
38
|
+
before: datetime | None
|
|
39
|
+
"""Filter results where field earlier than this."""
|
|
40
|
+
after: datetime | None
|
|
41
|
+
"""Filter results where field later than this."""
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
|
|
45
|
+
class OnBeforeAfter:
|
|
46
|
+
"""Data required to filter a query on a ``datetime`` column."""
|
|
47
|
+
|
|
48
|
+
field_name: str
|
|
49
|
+
"""Name of the model attribute to filter on."""
|
|
50
|
+
on_or_before: datetime | None
|
|
51
|
+
"""Filter results where field is on or earlier than this."""
|
|
52
|
+
on_or_after: datetime | None
|
|
53
|
+
"""Filter results where field on or later than this."""
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
@dataclass
|
|
57
|
+
class CollectionFilter(Generic[T]):
|
|
58
|
+
"""Data required to construct a ``WHERE ... IN (...)`` clause."""
|
|
59
|
+
|
|
60
|
+
field_name: str
|
|
61
|
+
"""Name of the model attribute to filter on."""
|
|
62
|
+
values: abc.Collection[T] | None
|
|
63
|
+
"""Values for ``IN`` clause.
|
|
64
|
+
|
|
65
|
+
An empty list will return an empty result set, however, if ``None``, the filter is not applied to the query, and all rows are returned. """
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@dataclass
|
|
69
|
+
class NotInCollectionFilter(Generic[T]):
|
|
70
|
+
"""Data required to construct a ``WHERE ... NOT IN (...)`` clause."""
|
|
71
|
+
|
|
72
|
+
field_name: str
|
|
73
|
+
"""Name of the model attribute to filter on."""
|
|
74
|
+
values: abc.Collection[T] | None
|
|
75
|
+
"""Values for ``NOT IN`` clause.
|
|
76
|
+
|
|
77
|
+
An empty list or ``None`` will return all rows."""
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@dataclass
|
|
81
|
+
class LimitOffset:
|
|
82
|
+
"""Data required to add limit/offset filtering to a query."""
|
|
83
|
+
|
|
84
|
+
limit: int
|
|
85
|
+
"""Value for ``LIMIT`` clause of query."""
|
|
86
|
+
offset: int
|
|
87
|
+
"""Value for ``OFFSET`` clause of query."""
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
@dataclass
|
|
91
|
+
class OrderBy:
|
|
92
|
+
"""Data required to construct a ``ORDER BY ...`` clause."""
|
|
93
|
+
|
|
94
|
+
field_name: str
|
|
95
|
+
"""Name of the model attribute to sort on."""
|
|
96
|
+
sort_order: Literal["asc", "desc"] = "asc"
|
|
97
|
+
"""Sort ascending or descending"""
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
@dataclass
|
|
101
|
+
class SearchFilter:
|
|
102
|
+
"""Data required to construct a ``WHERE field_name LIKE '%' || :value || '%'`` clause."""
|
|
103
|
+
|
|
104
|
+
field_name: str
|
|
105
|
+
"""Name of the model attribute to sort on."""
|
|
106
|
+
value: str
|
|
107
|
+
"""Values for ``LIKE`` clause."""
|
|
108
|
+
ignore_case: bool | None = False
|
|
109
|
+
"""Should the search be case insensitive."""
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@dataclass
|
|
113
|
+
class NotInSearchFilter:
|
|
114
|
+
"""Data required to construct a ``WHERE field_name NOT LIKE '%' || :value || '%'`` clause."""
|
|
115
|
+
|
|
116
|
+
field_name: str
|
|
117
|
+
"""Name of the model attribute to search on."""
|
|
118
|
+
value: str
|
|
119
|
+
"""Values for ``NOT LIKE`` clause."""
|
|
120
|
+
ignore_case: bool | None = False
|
|
121
|
+
"""Should the search be case insensitive."""
|
sqlspec/py.typed
ADDED
|
File without changes
|
|
File without changes
|
sqlspec/types/empty.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Sentinel type used to mark "no value supplied" distinctly from ``None``."""

from enum import Enum
from typing import Final, Literal, Union

__all__ = ("Empty", "EmptyType")


class _EmptyEnum(Enum):
    """A sentinel enum used as placeholder."""

    EMPTY = 0


# msgspec is an optional dependency of sqlspec (``_serialization`` falls
# back to orjson/json when it is absent), so this module must not require
# it either. When msgspec is available, ``EmptyType`` additionally accepts
# its ``UNSET`` sentinel; otherwise only our own sentinel is permitted.
try:
    from msgspec import UnsetType  # pyright: ignore[reportMissingImports]

    EmptyType = Union[Literal[_EmptyEnum.EMPTY], UnsetType]
except ImportError:  # pragma: no cover
    EmptyType = Literal[_EmptyEnum.EMPTY]  # type: ignore[misc]

Empty: Final = _EmptyEnum.EMPTY
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
# SPDX-FileCopyrightText: 2023-present Cody Fincher <codyfincher@google.com>
#
# SPDX-License-Identifier: MIT
from __future__ import annotations

from collections.abc import Collection, Iterable
from typing import Any, ClassVar, Protocol, TypeVar, runtime_checkable

__all__ = (
    "DataclassProtocol",
    "InstantiableCollection",
    "Logger",
)


class Logger(Protocol):
    """Logger protocol.

    Structural type matching both stdlib ``logging.Logger`` and
    structlog-style loggers — the method bodies here are stubs.
    """

    def debug(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'DEBUG' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def info(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'INFO' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def warning(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'WARNING' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def warn(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'WARN' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def error(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'ERROR' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def fatal(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'FATAL' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def exception(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Log a message with level 'ERROR' on this logger. The arguments are interpreted as for debug(). Exception info
        is added to the logging message.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def critical(self, event: str, *args: Any, **kwargs: Any) -> Any:
        """Output a log message at 'CRITICAL' level.

        Args:
            event: Log message.
            *args: Any args.
            **kwargs: Any kwargs.
        """

    def setLevel(self, level: int) -> None:  # noqa: N802
        """Set the log level

        Args:
            level: Log level to set as an integer

        Returns:
            None
        """
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
@runtime_checkable
class DataclassProtocol(Protocol):
    """Protocol for instance checking dataclasses.

    Any object whose class carries ``__dataclass_fields__`` (i.e. anything
    produced by ``@dataclass``) structurally matches this protocol.
    """

    __dataclass_fields__: ClassVar[dict[str, Any]]


T_co = TypeVar("T_co", covariant=True)


@runtime_checkable
class InstantiableCollection(Collection[T_co], Protocol[T_co]):  # pyright: ignore
    """A protocol for instantiable collection types.

    Matches collection types (``list``, ``set``, ``tuple``, ...) that can be
    constructed from a single iterable positional argument.
    """

    def __init__(self, iterable: Iterable[T_co], /) -> None: ...
|
|
File without changes
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from dataclasses import Field, fields
|
|
4
|
+
from typing import TYPE_CHECKING, TypeGuard
|
|
5
|
+
|
|
6
|
+
from sqlspec.types.empty import Empty
|
|
7
|
+
|
|
8
|
+
if TYPE_CHECKING:
|
|
9
|
+
from collections.abc import Iterable
|
|
10
|
+
from collections.abc import Set as AbstractSet
|
|
11
|
+
from typing import Any
|
|
12
|
+
|
|
13
|
+
from sqlspec.types.protocols import DataclassProtocol
|
|
14
|
+
|
|
15
|
+
__all__ = (
|
|
16
|
+
"extract_dataclass_fields",
|
|
17
|
+
"extract_dataclass_items",
|
|
18
|
+
"is_dataclass_instance",
|
|
19
|
+
"simple_asdict",
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def is_dataclass_instance(obj: Any) -> TypeGuard[DataclassProtocol]:
|
|
24
|
+
"""Check if an object is a dataclass instance.
|
|
25
|
+
|
|
26
|
+
Args:
|
|
27
|
+
obj: An object to check.
|
|
28
|
+
|
|
29
|
+
Returns:
|
|
30
|
+
True if the object is a dataclass instance.
|
|
31
|
+
"""
|
|
32
|
+
return hasattr(type(obj), "__dataclass_fields__")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def extract_dataclass_fields(
    dt: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    include: AbstractSet[str] | None = None,
    exclude: AbstractSet[str] | None = None,
) -> tuple[Field[Any], ...]:
    """Extract dataclass fields.

    Args:
        dt: A dataclass instance.
        exclude_none: Whether to exclude fields whose current value is ``None``.
        exclude_empty: Whether to exclude fields whose current value is ``Empty``.
        include: Field names to keep; all others are dropped.
        exclude: Field names to drop.

    Raises:
        ValueError: If a name appears in both ``include`` and ``exclude``.

    Returns:
        A tuple of dataclass fields.
    """
    include = include or set()
    exclude = exclude or set()

    overlap = include & exclude
    if overlap:
        msg = f"Fields {overlap} are both included and excluded."
        raise ValueError(msg)

    def _wanted(candidate: Field[Any]) -> bool:
        # Single predicate replacing a chain of generator expressions;
        # checks run in the same order with identical semantics, and
        # attribute values are only read when the relevant flag is set.
        if exclude_none and getattr(dt, candidate.name) is None:
            return False
        if exclude_empty and getattr(dt, candidate.name) is Empty:
            return False
        if include and candidate.name not in include:
            return False
        if exclude and candidate.name in exclude:
            return False
        return True

    return tuple(candidate for candidate in fields(dt) if _wanted(candidate))
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def extract_dataclass_items(
    dt: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    include: AbstractSet[str] | None = None,
    exclude: AbstractSet[str] | None = None,
) -> tuple[tuple[str, Any], ...]:
    """Extract dataclass name, value pairs.

    Unlike the 'asdict' method exported by the stdlib, this function does not pickle values.

    Args:
        dt: A dataclass instance.
        exclude_none: Whether to exclude None values.
        exclude_empty: Whether to exclude Empty values.
        include: An iterable of fields to include.
        exclude: An iterable of fields to exclude.

    Returns:
        A tuple of key/value pairs.
    """
    # Field selection (and the include/exclude validation) is delegated
    # entirely to ``extract_dataclass_fields``; values are read as-is.
    return tuple(
        (selected.name, getattr(dt, selected.name))
        for selected in extract_dataclass_fields(dt, exclude_none, exclude_empty, include, exclude)
    )
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def simple_asdict(
    obj: DataclassProtocol,
    exclude_none: bool = False,
    exclude_empty: bool = False,
    convert_nested: bool = True,
    exclude: set[str] | None = None,
) -> dict[str, Any]:
    """Convert a dataclass to a dictionary.

    This method has important differences to the standard library version:
    - it does not deepcopy values
    - it does not recurse into collections

    Args:
        obj: A dataclass instance.
        exclude_none: Whether to exclude None values.
        exclude_empty: Whether to exclude Empty values.
        convert_nested: Whether to recursively convert nested dataclasses.
        exclude: An iterable of fields to exclude.

    Returns:
        A dictionary of key/value pairs.
    """
    ret: dict[str, Any] = {}
    for field in extract_dataclass_fields(obj, exclude_none, exclude_empty, exclude=exclude):
        value = getattr(obj, field.name)
        if convert_nested and is_dataclass_instance(value):
            # NOTE: ``exclude`` is intentionally applied to top-level field
            # names only and is not propagated into nested dataclasses.
            ret[field.name] = simple_asdict(value, exclude_none, exclude_empty)
        else:
            # Reuse the value already read above instead of a second
            # ``getattr`` (the original recomputed the attribute access).
            ret[field.name] = value
    return ret
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: sqlspec
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: SQL Experiments in Python
|
|
5
|
+
Author-email: Cody Fincher <cody@litestar.dev>
|
|
6
|
+
Maintainer-email: Litestar Developers <hello@litestar.dev>
|
|
7
|
+
Requires-Python: <4.0,>=3.9
|
|
8
|
+
Requires-Dist: eval-type-backport; python_version <= '3.9'
|
|
9
|
+
Requires-Dist: typing-extensions>=4.0.0
|
|
10
|
+
Description-Content-Type: text/markdown
|
|
11
|
+
|
|
12
|
+
<!-- markdownlint-disable -->
|
|
13
|
+
<p align="center">
|
|
14
|
+
<!-- github-banner-start -->
|
|
15
|
+
<img src="https://raw.githubusercontent.com/litestar-org/branding/main/assets/Branding%20-%20SVG%20-%20Transparent/Logo%20-%20Banner%20-%20Inline%20-%20Light.svg#gh-light-mode-only" alt="Litestar Logo - Light" width="100%" height="auto" />
|
|
16
|
+
<img src="https://raw.githubusercontent.com/litestar-org/branding/main/assets/Branding%20-%20SVG%20-%20Transparent/Logo%20-%20Banner%20-%20Inline%20-%20Dark.svg#gh-dark-mode-only" alt="Litestar Logo - Dark" width="100%" height="auto" />
|
|
17
|
+
<!-- github-banner-end -->
|
|
18
|
+
|
|
19
|
+
</p>
|
|
20
|
+
<div align="center">
|
|
21
|
+
<!-- markdownlint-restore -->
|
|
22
|
+
|
|
23
|
+
# SQLSpec
|
|
24
|
+
|
|
25
|
+
SQL Experiments in Python
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
sqlspec/__init__.py,sha256=U4S_2y3zgLZVfMenHRaJFBW8yqh2mUBuI291LGQVOJ8,35
|
|
2
|
+
sqlspec/__metadata__.py,sha256=IKK7ARcZFaxFXHrjAgeneCvbZDQi5a-6Es32B8nIkTc,496
|
|
3
|
+
sqlspec/_serialization.py,sha256=p6CadqKxifALuKUQW_YCJQSzy4NIHj_NeKUTXY95r3s,835
|
|
4
|
+
sqlspec/exceptions.py,sha256=wfOqLdCmOBpQEkDlMlIAUYItYgTwY5YDmWiHnrBDZBg,2290
|
|
5
|
+
sqlspec/filters.py,sha256=1QeJkY8e4z6VPF0T9qn7xsiDKLKGWtZecg80iZZWdl0,3404
|
|
6
|
+
sqlspec/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
|
+
sqlspec/types/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
8
|
+
sqlspec/types/empty.py,sha256=5iijfAjHHzAVwWlkfea8woUWilHA6nJK_GN1MVedmS0,383
|
|
9
|
+
sqlspec/types/protocols.py,sha256=skczeIQzjvFbgdpnHdGJR_iTTDYJj0M5sef2vJby_es,3093
|
|
10
|
+
sqlspec/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
|
+
sqlspec/utils/dataclass.py,sha256=swzzYjDgIS0OkmGH5z33P1HFkzvspyamvWGyI25oNGE,4129
|
|
12
|
+
sqlspec-0.1.0.dist-info/METADATA,sha256=INa6CEObNsnM3zXee5nKmiD7FzZ8uIdFUzp8UOpgPGk,1045
|
|
13
|
+
sqlspec-0.1.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
|
|
14
|
+
sqlspec-0.1.0.dist-info/RECORD,,
|